Line endings normalization (rebased)

This commit is contained in:
Lyncode
2012-11-01 01:07:36 +00:00
parent b10be85130
commit d269a30a15
37 changed files with 10976 additions and 10962 deletions

14
.gitattributes vendored Normal file
View File

@@ -0,0 +1,14 @@
# Auto detect text files and perform LF normalization
* text=auto
# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain

View File

@@ -1,407 +1,407 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.dspace.content.Item;
import org.dspace.content.DCValue;
import org.dspace.content.Collection;
import java.util.ArrayList;
import java.util.List;
/**
* Utility class to store changes to item that may occur during a batch edit.
*
* @author Stuart Lewis
*/
public class BulkEditChange
{
/** The item these changes relate to */
private Item item;
/** The List of hashtables with the new elements */
private List<DCValue> adds;
/** The List of hashtables with the removed elements */
private List<DCValue> removes;
/** The List of hashtables with the unchanged elements */
private List<DCValue> constant;
/** The List of the complete set of new values (constant + adds) */
private List<DCValue> complete;
/** The list of old collections the item used to be mapped to */
private List<Collection> oldMappedCollections;
/** The list of new collections the item has been mapped into */
private List<Collection> newMappedCollections;
/** The old owning collection */
private Collection oldOwningCollection;
/** The new owning collection */
private Collection newOwningCollection;
/** Is this a new item */
private boolean newItem;
/** Has this item been deleted? */
private boolean deleted;
/** Has this item been withdrawn? */
private boolean withdrawn;
/** Has this item been reinstated? */
private boolean reinstated;
/** Have any changes actually been made? */
private boolean empty;
/**
* Initialise a change holder for a new item
*/
public BulkEditChange()
{
// Set the item to be null
item = null;
newItem = true;
empty = true;
oldOwningCollection = null;
newOwningCollection = null;
// Initialise the arrays
adds = new ArrayList<DCValue>();
removes = new ArrayList<DCValue>();
constant = new ArrayList<DCValue>();
complete = new ArrayList<DCValue>();
oldMappedCollections = new ArrayList<Collection>();
newMappedCollections = new ArrayList<Collection>();
}
/**
* Initialise a new change holder for an existing item
*
* @param i The Item to store
*/
public BulkEditChange(Item i)
{
// Store the item
item = i;
newItem = false;
empty = true;
// Initialise the arrays
adds = new ArrayList<DCValue>();
removes = new ArrayList<DCValue>();
constant = new ArrayList<DCValue>();
complete = new ArrayList<DCValue>();
oldMappedCollections = new ArrayList<Collection>();
newMappedCollections = new ArrayList<Collection>();
}
/**
* Store the item - used when a new item is created
*
* @param i The item
*/
public void setItem(Item i)
{
// Store the item
item = i;
}
/**
* Add an added metadata value
*
* @param dcv The value to add
*/
public void registerAdd(DCValue dcv)
{
// Add the added value
adds.add(dcv);
complete.add(dcv);
empty = false;
}
/**
* Add a removed metadata value
*
* @param dcv The value to remove
*/
public void registerRemove(DCValue dcv)
{
// Add the removed value
removes.add(dcv);
empty = false;
}
/**
* Add an unchanged metadata value
*
* @param dcv The value to keep unchanged
*/
public void registerConstant(DCValue dcv)
{
// Add the removed value
constant.add(dcv);
complete.add(dcv);
}
/**
* Add a new mapped Collection
*
* @param c The new mapped Collection
*/
public void registerNewMappedCollection(Collection c)
{
// Add the new owning Collection
newMappedCollections.add(c);
empty = false;
}
/**
* Add an old mapped Collection
*
* @param c The old mapped Collection
*/
public void registerOldMappedCollection(Collection c)
{
// Add the old owning Collection (if it isn't there already, or is an old collection)
boolean found = false;
if ((this.getOldOwningCollection() != null) &&
(this.getOldOwningCollection().getHandle().equals(c.getHandle())))
{
found = true;
}
for (Collection collection : oldMappedCollections)
{
if (collection.getHandle().equals(c.getHandle()))
{
found = true;
}
}
if (!found)
{
oldMappedCollections.add(c);
empty = false;
}
}
/**
* Register a change to the owning collection
*
* @param oldC The old owning collection
* @param newC The new owning collection
*/
public void changeOwningCollection(Collection oldC, Collection newC)
{
// Store the old owning collection
oldOwningCollection = oldC;
// Store the new owning collection
newOwningCollection = newC;
empty = false;
}
/**
* Set the owning collection of an item
*
* @param newC The new owning collection
*/
public void setOwningCollection(Collection newC)
{
// Store the new owning collection
newOwningCollection = newC;
//empty = false;
}
/**
* Get the DSpace Item that these changes are applicable to.
*
* @return The item
*/
public Item getItem()
{
// Return the item
return item;
}
/**
* Get the list of elements and their values that have been added.
*
* @return the list of elements and their values that have been added.
*/
public List<DCValue> getAdds()
{
// Return the array
return adds;
}
/**
* Get the list of elements and their values that have been removed.
*
* @return the list of elements and their values that have been removed.
*/
public List<DCValue> getRemoves()
{
// Return the array
return removes;
}
/**
* Get the list of unchanged values
*
* @return the list of unchanged values
*/
public List<DCValue> getConstant()
{
// Return the array
return constant;
}
/**
* Get the list of all values
*
* @return the list of all values
*/
public List<DCValue> getComplete()
{
// Return the array
return complete;
}
/**
* Get the list of new mapped Collections
*
* @return the list of new mapped collections
*/
public List<Collection> getNewMappedCollections()
{
// Return the array
return newMappedCollections;
}
/**
* Get the list of old mapped Collections
*
* @return the list of old mapped collections
*/
public List<Collection> getOldMappedCollections()
{
// Return the array
return oldMappedCollections;
}
/**
* Get the old owning collection
*
* @return the old owning collection
*/
public Collection getOldOwningCollection()
{
// Return the old owning collection
return oldOwningCollection;
}
/**
* Get the new owning collection
*
* @return the new owning collection
*/
public Collection getNewOwningCollection()
{
// Return the new owning collection
return newOwningCollection;
}
/**
* Does this change object represent a new item?
*
* @return Whether or not this is for a new item
*/
public boolean isNewItem()
{
// Return the new item status
return newItem;
}
/**
* Does this change object represent a deleted item?
*
* @return Whether or not this is for a deleted item
*/
public boolean isDeleted()
{
// Return the new item status
return deleted;
}
/**
* Set that this item has been deleted
*/
public void setDeleted() {
// Store the setting
deleted = true;
empty = false;
}
/**
* Does this change object represent a withdrawn item?
*
* @return Whether or not this is for a withdrawn item
*/
public boolean isWithdrawn()
{
// Return the new item status
return withdrawn;
}
/**
* Set that this item has been withdrawn
*/
public void setWithdrawn() {
// Store the setting
withdrawn = true;
empty = false;
}
/**
* Does this change object represent a reinstated item?
*
* @return Whether or not this is for a reinstated item
*/
public boolean isReinstated()
{
// Return the new item status
return reinstated;
}
/**
* Set that this item has been deleted
*/
public void setReinstated() {
// Store the setting
reinstated = true;
empty = false;
}
/**
* Have any changes actually been recorded, or is this empty?
*
* @return Whether or not changes have been made
*/
public boolean hasChanges()
{
return !empty;
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.dspace.content.Item;
import org.dspace.content.DCValue;
import org.dspace.content.Collection;
import java.util.ArrayList;
import java.util.List;
/**
 * Utility class to store changes to item that may occur during a batch edit.
 *
 * @author Stuart Lewis
 */
public class BulkEditChange
{
    /** The item these changes relate to */
    private Item item;

    /** The List of hashtables with the new elements */
    private List<DCValue> adds;

    /** The List of hashtables with the removed elements */
    private List<DCValue> removes;

    /** The List of hashtables with the unchanged elements */
    private List<DCValue> constant;

    /** The List of the complete set of new values (constant + adds) */
    private List<DCValue> complete;

    /** The list of old collections the item used to be mapped to */
    private List<Collection> oldMappedCollections;

    /** The list of new collections the item has been mapped into */
    private List<Collection> newMappedCollections;

    /** The old owning collection */
    private Collection oldOwningCollection;

    /** The new owning collection */
    private Collection newOwningCollection;

    /** Is this a new item */
    private boolean newItem;

    /** Has this item been deleted? */
    private boolean deleted;

    /** Has this item been withdrawn? */
    private boolean withdrawn;

    /** Has this item been reinstated? */
    private boolean reinstated;

    /** Have any changes actually been made? */
    private boolean empty;

    /**
     * Initialise a change holder for a new item
     */
    public BulkEditChange()
    {
        // Set the item to be null
        item = null;
        newItem = true;
        empty = true;
        oldOwningCollection = null;
        newOwningCollection = null;

        // Initialise the arrays
        adds = new ArrayList<DCValue>();
        removes = new ArrayList<DCValue>();
        constant = new ArrayList<DCValue>();
        complete = new ArrayList<DCValue>();
        oldMappedCollections = new ArrayList<Collection>();
        newMappedCollections = new ArrayList<Collection>();
    }

    /**
     * Initialise a new change holder for an existing item
     *
     * @param i The Item to store
     */
    public BulkEditChange(Item i)
    {
        // Store the item
        item = i;
        newItem = false;
        empty = true;

        // Initialise the arrays
        adds = new ArrayList<DCValue>();
        removes = new ArrayList<DCValue>();
        constant = new ArrayList<DCValue>();
        complete = new ArrayList<DCValue>();
        oldMappedCollections = new ArrayList<Collection>();
        newMappedCollections = new ArrayList<Collection>();
    }

    /**
     * Store the item - used when a new item is created
     *
     * @param i The item
     */
    public void setItem(Item i)
    {
        // Store the item
        item = i;
    }

    /**
     * Add an added metadata value
     *
     * @param dcv The value to add
     */
    public void registerAdd(DCValue dcv)
    {
        // Add the added value to both the adds list and the complete set
        adds.add(dcv);
        complete.add(dcv);
        empty = false;
    }

    /**
     * Add a removed metadata value
     *
     * @param dcv The value to remove
     */
    public void registerRemove(DCValue dcv)
    {
        // Add the removed value
        removes.add(dcv);
        empty = false;
    }

    /**
     * Add an unchanged metadata value
     *
     * @param dcv The value to keep unchanged
     */
    public void registerConstant(DCValue dcv)
    {
        // Record the unchanged value (does not mark the holder as changed)
        constant.add(dcv);
        complete.add(dcv);
    }

    /**
     * Add a new mapped Collection
     *
     * @param c The new mapped Collection
     */
    public void registerNewMappedCollection(Collection c)
    {
        // Add the new mapped Collection
        newMappedCollections.add(c);
        empty = false;
    }

    /**
     * Add an old mapped Collection
     *
     * @param c The old mapped Collection
     */
    public void registerOldMappedCollection(Collection c)
    {
        // Add the old mapped Collection, unless its handle matches the old
        // owning collection or a collection already recorded in the list
        boolean found = false;

        if ((this.getOldOwningCollection() != null) &&
            (this.getOldOwningCollection().getHandle().equals(c.getHandle())))
        {
            found = true;
        }

        for (Collection collection : oldMappedCollections)
        {
            if (collection.getHandle().equals(c.getHandle()))
            {
                found = true;
            }
        }

        if (!found)
        {
            oldMappedCollections.add(c);
            empty = false;
        }
    }

    /**
     * Register a change to the owning collection
     *
     * @param oldC The old owning collection
     * @param newC The new owning collection
     */
    public void changeOwningCollection(Collection oldC, Collection newC)
    {
        // Store the old owning collection
        oldOwningCollection = oldC;

        // Store the new owning collection
        newOwningCollection = newC;
        empty = false;
    }

    /**
     * Set the owning collection of an item
     *
     * @param newC The new owning collection
     */
    public void setOwningCollection(Collection newC)
    {
        // Store the new owning collection
        newOwningCollection = newC;
        // NOTE(review): deliberately does NOT set empty = false (see commented
        // line below) — presumably because this is used for new items where the
        // owning collection is not itself a "change"; confirm with callers
        //empty = false;
    }

    /**
     * Get the DSpace Item that these changes are applicable to.
     *
     * @return The item
     */
    public Item getItem()
    {
        // Return the item
        return item;
    }

    /**
     * Get the list of elements and their values that have been added.
     *
     * @return the list of elements and their values that have been added.
     */
    public List<DCValue> getAdds()
    {
        // Return the array
        return adds;
    }

    /**
     * Get the list of elements and their values that have been removed.
     *
     * @return the list of elements and their values that have been removed.
     */
    public List<DCValue> getRemoves()
    {
        // Return the array
        return removes;
    }

    /**
     * Get the list of unchanged values
     *
     * @return the list of unchanged values
     */
    public List<DCValue> getConstant()
    {
        // Return the array
        return constant;
    }

    /**
     * Get the list of all values
     *
     * @return the list of all values
     */
    public List<DCValue> getComplete()
    {
        // Return the array
        return complete;
    }

    /**
     * Get the list of new mapped Collections
     *
     * @return the list of new mapped collections
     */
    public List<Collection> getNewMappedCollections()
    {
        // Return the array
        return newMappedCollections;
    }

    /**
     * Get the list of old mapped Collections
     *
     * @return the list of old mapped collections
     */
    public List<Collection> getOldMappedCollections()
    {
        // Return the array
        return oldMappedCollections;
    }

    /**
     * Get the old owning collection
     *
     * @return the old owning collection
     */
    public Collection getOldOwningCollection()
    {
        // Return the old owning collection
        return oldOwningCollection;
    }

    /**
     * Get the new owning collection
     *
     * @return the new owning collection
     */
    public Collection getNewOwningCollection()
    {
        // Return the new owning collection
        return newOwningCollection;
    }

    /**
     * Does this change object represent a new item?
     *
     * @return Whether or not this is for a new item
     */
    public boolean isNewItem()
    {
        // Return the new item status
        return newItem;
    }

    /**
     * Does this change object represent a deleted item?
     *
     * @return Whether or not this is for a deleted item
     */
    public boolean isDeleted()
    {
        // Return the deleted status
        return deleted;
    }

    /**
     * Set that this item has been deleted
     */
    public void setDeleted() {
        // Store the setting
        deleted = true;
        empty = false;
    }

    /**
     * Does this change object represent a withdrawn item?
     *
     * @return Whether or not this is for a withdrawn item
     */
    public boolean isWithdrawn()
    {
        // Return the withdrawn status
        return withdrawn;
    }

    /**
     * Set that this item has been withdrawn
     */
    public void setWithdrawn() {
        // Store the setting
        withdrawn = true;
        empty = false;
    }

    /**
     * Does this change object represent a reinstated item?
     *
     * @return Whether or not this is for a reinstated item
     */
    public boolean isReinstated()
    {
        // Return the reinstated status
        return reinstated;
    }

    /**
     * Set that this item has been reinstated
     */
    public void setReinstated() {
        // Store the setting
        reinstated = true;
        empty = false;
    }

    /**
     * Have any changes actually been recorded, or is this empty?
     *
     * @return Whether or not changes have been made
     */
    public boolean hasChanges()
    {
        return !empty;
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,193 +1,193 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Utility class to store a line from a CSV file
 *
 * @author Stuart Lewis
 */
public class DSpaceCSVLine implements Serializable
{
    /** Declared explicitly so the serialized form does not silently change
     *  with compiler-computed defaults */
    private static final long serialVersionUID = 1L;

    /** The item id of the item represented by this line. -1 is for a new item */
    private int id;

    /** The elements in this line, keyed by the metadata type
     *  (parameterized: was a raw Map&lt;String, ArrayList&gt;, which forced an
     *  unchecked cast in getAction()) */
    private Map<String, List<String>> items;

    /**
     * Create a new CSV line
     *
     * @param itemId The item ID of the line
     */
    public DSpaceCSVLine(int itemId)
    {
        // Store the ID, and initialise the hashtable
        this.id = itemId;
        this.items = new HashMap<String, List<String>>();
    }

    /**
     * Create a new CSV line for a new item
     */
    public DSpaceCSVLine()
    {
        // Set the ID to be -1, and initialise the hashtable
        this.id = -1;
        this.items = new HashMap<String, List<String>>();
    }

    /**
     * Get the item ID that this line represents
     *
     * @return The item ID, or -1 for a new item
     */
    public int getID()
    {
        // Return the ID
        return id;
    }

    /**
     * Add a new metadata value to this line
     *
     * @param key The metadata key (e.g. dc.contributor.author)
     * @param value The metadata value (null values are ignored, but the key is
     *              still registered)
     */
    public void add(String key, String value)
    {
        // Create the value list lazily on first use of this key
        if (items.get(key) == null)
        {
            items.put(key, new ArrayList<String>());
        }

        // Store the value if it is not null
        if (value != null)
        {
            items.get(key).add(value);
        }
    }

    /**
     * Get all the values that match the given metadata key. Will be null if none exist.
     *
     * @param key The metadata key
     * @return All the elements that match, or null if the key was never added
     */
    public List<String> get(String key)
    {
        // Return any relevant values
        return items.get(key);
    }

    /**
     * Get any action associated with this line
     *
     * @return The action (may be blank, 'withdraw', 'reinstate' or 'delete')
     */
    public String getAction()
    {
        if (items.containsKey("action")) {
            // Typed list: no cast needed now that the map is parameterized
            List<String> actions = items.get("action");
            if (actions.size() > 0) {
                return actions.get(0).trim();
            }
        }
        return "";
    }

    /**
     * Get all the metadata keys that are represented in this line
     *
     * @return A set of all the keys
     */
    public Set<String> keys()
    {
        // Return the keys
        return items.keySet();
    }

    /**
     * Write this line out as a CSV formatted string, in the order given by the headings provided
     *
     * @param headings The headings which define the order the elements must be presented in
     * @return The CSV formatted String
     */
    protected String toCSV(List<String> headings)
    {
        StringBuilder bits = new StringBuilder();

        // Add the id, then the collection value(s) as the fixed leading columns
        bits.append("\"").append(id).append("\"").append(DSpaceCSV.fieldSeparator);
        bits.append(valueToCSV(items.get("collection")));

        // Add the rest of the elements ("collection" was already emitted above,
        // so its column is left empty here)
        for (String heading : headings)
        {
            bits.append(DSpaceCSV.fieldSeparator);
            List<String> values = items.get(heading);
            if (values != null && !"collection".equals(heading))
            {
                bits.append(valueToCSV(values));
            }
        }
        return bits.toString();
    }

    /**
     * Internal method to create a CSV formatted String joining a given set of elements
     *
     * @param values The values to create the string from (may be null)
     * @return The line as a CSV formatted String ("" for null input)
     */
    protected String valueToCSV(List<String> values)
    {
        // Check there is some content
        if (values == null)
        {
            return "";
        }

        // Join multiple values with the configured value separator
        String s;
        if (values.size() == 1)
        {
            s = values.get(0);
        }
        else
        {
            StringBuilder str = new StringBuilder();
            for (String value : values)
            {
                if (str.length() > 0)
                {
                    str.append(DSpaceCSV.valueSeparator);
                }
                str.append(value);
            }
            s = str.toString();
        }

        // Escape internal quotes by doubling them, then wrap the field in quotes
        return "\"" + s.replaceAll("\"", "\"\"") + "\"";
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Utility class to store a line from a CSV file
 *
 * @author Stuart Lewis
 */
public class DSpaceCSVLine implements Serializable
{
    /** Declared explicitly so the serialized form does not silently change
     *  with compiler-computed defaults */
    private static final long serialVersionUID = 1L;

    /** The item id of the item represented by this line. -1 is for a new item */
    private int id;

    /** The elements in this line, keyed by the metadata type
     *  (parameterized: was a raw Map&lt;String, ArrayList&gt;, which forced an
     *  unchecked cast in getAction()) */
    private Map<String, List<String>> items;

    /**
     * Create a new CSV line
     *
     * @param itemId The item ID of the line
     */
    public DSpaceCSVLine(int itemId)
    {
        // Store the ID, and initialise the hashtable
        this.id = itemId;
        this.items = new HashMap<String, List<String>>();
    }

    /**
     * Create a new CSV line for a new item
     */
    public DSpaceCSVLine()
    {
        // Set the ID to be -1, and initialise the hashtable
        this.id = -1;
        this.items = new HashMap<String, List<String>>();
    }

    /**
     * Get the item ID that this line represents
     *
     * @return The item ID, or -1 for a new item
     */
    public int getID()
    {
        // Return the ID
        return id;
    }

    /**
     * Add a new metadata value to this line
     *
     * @param key The metadata key (e.g. dc.contributor.author)
     * @param value The metadata value (null values are ignored, but the key is
     *              still registered)
     */
    public void add(String key, String value)
    {
        // Create the value list lazily on first use of this key
        if (items.get(key) == null)
        {
            items.put(key, new ArrayList<String>());
        }

        // Store the value if it is not null
        if (value != null)
        {
            items.get(key).add(value);
        }
    }

    /**
     * Get all the values that match the given metadata key. Will be null if none exist.
     *
     * @param key The metadata key
     * @return All the elements that match, or null if the key was never added
     */
    public List<String> get(String key)
    {
        // Return any relevant values
        return items.get(key);
    }

    /**
     * Get any action associated with this line
     *
     * @return The action (may be blank, 'withdraw', 'reinstate' or 'delete')
     */
    public String getAction()
    {
        if (items.containsKey("action")) {
            // Typed list: no cast needed now that the map is parameterized
            List<String> actions = items.get("action");
            if (actions.size() > 0) {
                return actions.get(0).trim();
            }
        }
        return "";
    }

    /**
     * Get all the metadata keys that are represented in this line
     *
     * @return A set of all the keys
     */
    public Set<String> keys()
    {
        // Return the keys
        return items.keySet();
    }

    /**
     * Write this line out as a CSV formatted string, in the order given by the headings provided
     *
     * @param headings The headings which define the order the elements must be presented in
     * @return The CSV formatted String
     */
    protected String toCSV(List<String> headings)
    {
        StringBuilder bits = new StringBuilder();

        // Add the id, then the collection value(s) as the fixed leading columns
        bits.append("\"").append(id).append("\"").append(DSpaceCSV.fieldSeparator);
        bits.append(valueToCSV(items.get("collection")));

        // Add the rest of the elements ("collection" was already emitted above,
        // so its column is left empty here)
        for (String heading : headings)
        {
            bits.append(DSpaceCSV.fieldSeparator);
            List<String> values = items.get(heading);
            if (values != null && !"collection".equals(heading))
            {
                bits.append(valueToCSV(values));
            }
        }
        return bits.toString();
    }

    /**
     * Internal method to create a CSV formatted String joining a given set of elements
     *
     * @param values The values to create the string from (may be null)
     * @return The line as a CSV formatted String ("" for null input)
     */
    protected String valueToCSV(List<String> values)
    {
        // Check there is some content
        if (values == null)
        {
            return "";
        }

        // Join multiple values with the configured value separator
        String s;
        if (values.size() == 1)
        {
            s = values.get(0);
        }
        else
        {
            StringBuilder str = new StringBuilder();
            for (String value : values)
            {
                if (str.length() > 0)
                {
                    str.append(DSpaceCSV.valueSeparator);
                }
                str.append(value);
            }
            s = str.toString();
        }

        // Escape internal quotes by doubling them, then wrap the field in quotes
        return "\"" + s.replaceAll("\"", "\"\"") + "\"";
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,147 +1,147 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Formatter;
import java.util.Locale;
import java.util.Map;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.license.FormattableArgument;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Utility class to manage generation and storing of the license text that the
 * submitter has to grant/granted for archiving the item
 *
 * @author bollini
 *
 */
public class LicenseUtils
{
    /** Utility class: static methods only, no instances. */
    private LicenseUtils()
    {
    }

    /**
     * Return the text of the license that the user has granted/must grant
     * before for submit the item. The license text is build using the template
     * defined for the collection if any or the wide site configuration. In the
     * license text the following substitution can be used. {0} the eperson
     * firstname<br>
     * {1} the eperson lastname<br>
     * {2} the eperson email<br>
     * {3} the current date<br>
     * {4} the collection object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {5} the item object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {6} the eperson object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {x} any addition argument supplied wrapped in the
     * LicenseArgumentFormatter based on his type (map key)
     *
     * @see LicenseArgumentFormatter
     * @param locale the locale used for formatting
     * @param collection the collection whose license template is used
     * @param item the item being submitted
     * @param eperson the submitter
     * @param additionalInfo extra named arguments for the template (may be null)
     * @return the license text obtained substituting the provided argument in
     *         the license template
     */
    public static String getLicenseText(Locale locale, Collection collection,
            Item item, EPerson eperson, Map<String, Object> additionalInfo)
    {
        Formatter formatter = new Formatter(locale);

        // EPerson firstname, lastname, email and the current date are exposed
        // as separate arguments to keep "traditional" text licenses simple;
        // the collection, item and eperson objects are also available
        int numArgs = 7 + (additionalInfo != null ? additionalInfo.size() : 0);
        Object[] args = new Object[numArgs];
        args[0] = eperson.getFirstName();
        args[1] = eperson.getLastName();
        args[2] = eperson.getEmail();
        args[3] = new java.util.Date();
        args[4] = new FormattableArgument("collection", collection);
        args[5] = new FormattableArgument("item", item);
        args[6] = new FormattableArgument("eperson", eperson);

        if (additionalInfo != null)
        {
            int i = 7; // Start at the next index after the fixed args
            for (Map.Entry<String, Object> info : additionalInfo.entrySet())
            {
                args[i] = new FormattableArgument(info.getKey(), info.getValue());
                i++;
            }
        }

        String licenseTemplate = collection.getLicense();
        return formatter.format(licenseTemplate, args).toString();
    }

    /**
     * Utility method if no additional arguments has need to be supplied to the
     * license template. (i.e. call the full getLicenseText supplying
     * <code>null</code> for the additionalInfo argument)
     *
     * @param locale the locale used for formatting
     * @param collection the collection whose license template is used
     * @param item the item being submitted
     * @param eperson the submitter
     * @return the license text with no additional substitution arguments
     */
    public static String getLicenseText(Locale locale, Collection collection,
            Item item, EPerson eperson)
    {
        return getLicenseText(locale, collection, item, eperson, null);
    }

    /**
     * Store a copy of the license a user granted in the item.
     *
     * @param context
     *            the dspace context
     * @param item
     *            the item object of the license
     * @param licenseText
     *            the license the user granted
     * @throws SQLException
     * @throws IOException
     * @throws AuthorizeException
     */
    public static void grantLicense(Context context, Item item,
            String licenseText) throws SQLException, IOException,
            AuthorizeException
    {
        // Store the license text as a bitstream on the item. Use an explicit
        // encoding: the former getBytes() call used the platform default
        // charset, making the stored bytes platform-dependent.
        byte[] licenseBytes = licenseText.getBytes("UTF-8");
        ByteArrayInputStream bais = new ByteArrayInputStream(licenseBytes);
        Bitstream b = item.createSingleBitstream(bais, "LICENSE");

        // Now set the format and name of the bitstream
        b.setName("license.txt");
        b.setSource("Written by org.dspace.content.LicenseUtils");

        // Find the License format
        BitstreamFormat bf = BitstreamFormat.findByShortDescription(context,
                "License");
        b.setFormat(bf);

        b.update();
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Formatter;
import java.util.Locale;
import java.util.Map;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.license.FormattableArgument;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Utility class to manage generation and storing of the license text that the
 * submitter has to grant/granted for archiving the item
 *
 * @author bollini
 *
 */
public class LicenseUtils
{
    /** Utility class: static methods only, no instances. */
    private LicenseUtils()
    {
    }

    /**
     * Return the text of the license that the user has granted/must grant
     * before for submit the item. The license text is build using the template
     * defined for the collection if any or the wide site configuration. In the
     * license text the following substitution can be used. {0} the eperson
     * firstname<br>
     * {1} the eperson lastname<br>
     * {2} the eperson email<br>
     * {3} the current date<br>
     * {4} the collection object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {5} the item object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {6} the eperson object that will be formatted using the appropriate
     * LicenseArgumentFormatter plugin (if defined)<br>
     * {x} any addition argument supplied wrapped in the
     * LicenseArgumentFormatter based on his type (map key)
     *
     * @see LicenseArgumentFormatter
     * @param locale the locale used for formatting
     * @param collection the collection whose license template is used
     * @param item the item being submitted
     * @param eperson the submitter
     * @param additionalInfo extra named arguments for the template (may be null)
     * @return the license text obtained substituting the provided argument in
     *         the license template
     */
    public static String getLicenseText(Locale locale, Collection collection,
            Item item, EPerson eperson, Map<String, Object> additionalInfo)
    {
        Formatter formatter = new Formatter(locale);

        // EPerson firstname, lastname, email and the current date are exposed
        // as separate arguments to keep "traditional" text licenses simple;
        // the collection, item and eperson objects are also available
        int numArgs = 7 + (additionalInfo != null ? additionalInfo.size() : 0);
        Object[] args = new Object[numArgs];
        args[0] = eperson.getFirstName();
        args[1] = eperson.getLastName();
        args[2] = eperson.getEmail();
        args[3] = new java.util.Date();
        args[4] = new FormattableArgument("collection", collection);
        args[5] = new FormattableArgument("item", item);
        args[6] = new FormattableArgument("eperson", eperson);

        if (additionalInfo != null)
        {
            int i = 7; // Start at the next index after the fixed args
            for (Map.Entry<String, Object> info : additionalInfo.entrySet())
            {
                args[i] = new FormattableArgument(info.getKey(), info.getValue());
                i++;
            }
        }

        String licenseTemplate = collection.getLicense();
        return formatter.format(licenseTemplate, args).toString();
    }

    /**
     * Utility method if no additional arguments has need to be supplied to the
     * license template. (i.e. call the full getLicenseText supplying
     * <code>null</code> for the additionalInfo argument)
     *
     * @param locale the locale used for formatting
     * @param collection the collection whose license template is used
     * @param item the item being submitted
     * @param eperson the submitter
     * @return the license text with no additional substitution arguments
     */
    public static String getLicenseText(Locale locale, Collection collection,
            Item item, EPerson eperson)
    {
        return getLicenseText(locale, collection, item, eperson, null);
    }

    /**
     * Store a copy of the license a user granted in the item.
     *
     * @param context
     *            the dspace context
     * @param item
     *            the item object of the license
     * @param licenseText
     *            the license the user granted
     * @throws SQLException
     * @throws IOException
     * @throws AuthorizeException
     */
    public static void grantLicense(Context context, Item item,
            String licenseText) throws SQLException, IOException,
            AuthorizeException
    {
        // Store the license text as a bitstream on the item. Use an explicit
        // encoding: the former getBytes() call used the platform default
        // charset, making the stored bytes platform-dependent.
        byte[] licenseBytes = licenseText.getBytes("UTF-8");
        ByteArrayInputStream bais = new ByteArrayInputStream(licenseBytes);
        Bitstream b = item.createSingleBitstream(bais, "LICENSE");

        // Now set the format and name of the bitstream
        b.setName("license.txt");
        b.setSource("Written by org.dspace.content.LicenseUtils");

        // Find the License format
        BitstreamFormat bf = BitstreamFormat.findByShortDescription(context,
                "License");
        b.setFormat(bf);

        b.update();
    }
}

View File

@@ -1,89 +1,89 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
/**
 * A deliberately trivial test fixture for authority control with
 * AuthorityVariantsSupport: every non-blank input yields three synthetic
 * variants/matches built by suffixing the input string.
 *
 * @author Andrea Bollini (CILEA)
 */
public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport
{
    @Override
    public List<String> getVariants(String key, String locale)
    {
        // Blank keys have no variants.
        if (StringUtils.isBlank(key))
        {
            return null;
        }
        List<String> variants = new ArrayList<String>();
        for (int idx = 0; idx < 3; idx++)
        {
            variants.add(key + "_variant#" + idx);
        }
        return variants;
    }

    @Override
    public Choices getMatches(String field, String text, int collection,
            int start, int limit, String locale)
    {
        // Blank query: empty, non-erroneous result set.
        if (StringUtils.isBlank(text))
        {
            return new Choices(false);
        }
        // Fabricate three ambiguous matches derived from the query text.
        Choice[] found = new Choice[3];
        for (int idx = 0; idx < 3; idx++)
        {
            found[idx] = new Choice(text + "_authority#" + idx,
                    text + "_value#" + idx, text + "_label#" + idx);
        }
        return new Choices(found, 0, 3, Choices.CF_AMBIGUOUS, false);
    }

    @Override
    public Choices getBestMatch(String field, String text, int collection,
            String locale)
    {
        if (StringUtils.isBlank(text))
        {
            return new Choices(false);
        }
        // Single "best" match, reported with uncertain confidence.
        Choice[] best = new Choice[] { new Choice(text + "_authoritybest",
                text + "_valuebest", text + "_labelbest") };
        return new Choices(best, 0, 3, Choices.CF_UNCERTAIN, false);
    }

    @Override
    public String getLabel(String field, String key, String locale)
    {
        if (StringUtils.isBlank(key))
        {
            return "Unknown";
        }
        return key.replaceAll("authority", "label");
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
/**
 * A deliberately trivial test fixture for authority control with
 * AuthorityVariantsSupport: every non-blank input yields three synthetic
 * variants/matches built by suffixing the input string.
 *
 * @author Andrea Bollini (CILEA)
 */
public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport
{
    @Override
    public List<String> getVariants(String key, String locale)
    {
        // Blank keys have no variants.
        if (StringUtils.isBlank(key))
        {
            return null;
        }
        List<String> variants = new ArrayList<String>();
        for (int idx = 0; idx < 3; idx++)
        {
            variants.add(key + "_variant#" + idx);
        }
        return variants;
    }

    @Override
    public Choices getMatches(String field, String text, int collection,
            int start, int limit, String locale)
    {
        // Blank query: empty, non-erroneous result set.
        if (StringUtils.isBlank(text))
        {
            return new Choices(false);
        }
        // Fabricate three ambiguous matches derived from the query text.
        Choice[] found = new Choice[3];
        for (int idx = 0; idx < 3; idx++)
        {
            found[idx] = new Choice(text + "_authority#" + idx,
                    text + "_value#" + idx, text + "_label#" + idx);
        }
        return new Choices(found, 0, 3, Choices.CF_AMBIGUOUS, false);
    }

    @Override
    public Choices getBestMatch(String field, String text, int collection,
            String locale)
    {
        if (StringUtils.isBlank(text))
        {
            return new Choices(false);
        }
        // Single "best" match, reported with uncertain confidence.
        Choice[] best = new Choice[] { new Choice(text + "_authoritybest",
                text + "_valuebest", text + "_labelbest") };
        return new Choices(best, 0, 3, Choices.CF_UNCERTAIN, false);
    }

    @Override
    public String getLabel(String field, String key, String locale)
    {
        if (StringUtils.isBlank(key))
        {
            return "Unknown";
        }
        return key.replaceAll("authority", "label");
    }
}

View File

@@ -1,174 +1,174 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.util.List;
import java.sql.SQLException;
import org.dspace.core.Context;
import org.dspace.content.DSpaceObject;
import org.dspace.authorize.AuthorizeException;
import org.jdom.Element;
import org.jdom.Namespace;
/**
 * Crosswalk descriptive metadata to and from DIM (DSpace Intermediate
 * Metadata), used to record a precise and complete copy of an object's
 * descriptive metadata inside an AIP.  Crosswalks to standard formats such
 * as MODS or DC are inherently lossy, while DIM mirrors the state of the
 * descriptive metadata in the RDBMS exactly, so it is the preferred format
 * when an AIP must preserve and restore an object verbatim.
 * <p>
 * A parallel descriptive metadata section in one of the preferred standard
 * formats (e.g. MODS) is still recommended so that external applications
 * can make sense of DSpace AIPs for preservation purposes.
 *
 * @author Larry Stone
 * @version $Revision: 1.2 $
 */
public class AIPDIMCrosswalk
    implements DisseminationCrosswalk, IngestionCrosswalk
{
    /**
     * Get XML namespaces of the elements this crosswalk may return.
     *
     * @return array holding the single DIM namespace.
     */
    public Namespace[] getNamespaces()
    {
        // Array initializer instead of element-by-element fill.
        return new Namespace[] { XSLTCrosswalk.DIM_NS };
    }

    /**
     * Get the XML Schema location(s) of the target metadata format,
     * i.e. the value for the <code>xsi:schemaLocation</code> attribute.
     *
     * @return empty string: no schema is published for DIM.
     */
    public String getSchemaLocation()
    {
        return "";
    }

    /**
     * Predicate: Can this disseminator crosswalk the given object.
     * Needed by the OAI-PMH server implementation.
     *
     * @param dso dspace object, e.g. an <code>Item</code>.
     * @return always true: any object's metadata can be rendered as DIM.
     */
    public boolean canDisseminate(DSpaceObject dso)
    {
        return true;
    }

    /**
     * Predicate: Does this disseminator prefer to return a list of Elements
     * rather than a single root Element?
     *
     * @return false: DIM output is wrapped in a single root element.
     */
    public boolean preferList()
    {
        return false;
    }

    /**
     * Execute crosswalk, returning List of XML elements, e.g. for embedding
     * in a METS document's <code>xmlData</code> field.  Never returns
     * <code>null</code>; when there are no results the list is empty.
     *
     * @param dso the DSpace Object whose metadata to export.
     * @return results of crosswalk as list of XML elements.
     *
     * @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
     * @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
     * @throws IOException I/O failure in services this calls
     * @throws SQLException Database failure in services this calls
     * @throws AuthorizeException current user not authorized for this operation.
     */
    public List<Element> disseminateList(DSpaceObject dso)
        throws CrosswalkException, IOException, SQLException,
               AuthorizeException
    {
        // Same content as disseminateElement(), minus the root wrapper.
        return disseminateElement(dso).getChildren();
    }

    /**
     * Execute crosswalk, returning one XML root element as a JDOM
     * <code>Element</code> object (typically the root of a document).
     *
     * @param dso the DSpace Object whose metadata to export.
     * @return root Element of the target metadata, never <code>null</code>
     *
     * @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
     * @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
     * @throws IOException I/O failure in services this calls
     * @throws SQLException Database failure in services this calls
     * @throws AuthorizeException current user not authorized for this operation.
     */
    public Element disseminateElement(DSpaceObject dso)
        throws CrosswalkException, IOException, SQLException,
               AuthorizeException
    {
        return XSLTDisseminationCrosswalk.createDIM(dso);
    }

    /**
     * Ingest a whole document: delegate to the list form with the root
     * element's children.
     */
    public void ingest(Context context, DSpaceObject dso, Element root)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        ingest(context, dso, root.getChildren());
    }

    /**
     * Ingest a list of metadata fields; they correspond directly to
     * Item.addMetadata() calls so they are simply executed.
     */
    public void ingest(Context context, DSpaceObject dso, List<Element> dimList)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        XSLTIngestionCrosswalk.ingestDIM(context, dso, dimList);
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.util.List;
import java.sql.SQLException;
import org.dspace.core.Context;
import org.dspace.content.DSpaceObject;
import org.dspace.authorize.AuthorizeException;
import org.jdom.Element;
import org.jdom.Namespace;
/**
 * Crosswalk descriptive metadata to and from DIM (DSpace Intermediate
 * Metadata), used to record a precise and complete copy of an object's
 * descriptive metadata inside an AIP.  Crosswalks to standard formats such
 * as MODS or DC are inherently lossy, while DIM mirrors the state of the
 * descriptive metadata in the RDBMS exactly, so it is the preferred format
 * when an AIP must preserve and restore an object verbatim.
 * <p>
 * A parallel descriptive metadata section in one of the preferred standard
 * formats (e.g. MODS) is still recommended so that external applications
 * can make sense of DSpace AIPs for preservation purposes.
 *
 * @author Larry Stone
 * @version $Revision: 1.2 $
 */
public class AIPDIMCrosswalk
    implements DisseminationCrosswalk, IngestionCrosswalk
{
    /**
     * Get XML namespaces of the elements this crosswalk may return.
     *
     * @return array holding the single DIM namespace.
     */
    public Namespace[] getNamespaces()
    {
        // Array initializer instead of element-by-element fill.
        return new Namespace[] { XSLTCrosswalk.DIM_NS };
    }

    /**
     * Get the XML Schema location(s) of the target metadata format,
     * i.e. the value for the <code>xsi:schemaLocation</code> attribute.
     *
     * @return empty string: no schema is published for DIM.
     */
    public String getSchemaLocation()
    {
        return "";
    }

    /**
     * Predicate: Can this disseminator crosswalk the given object.
     * Needed by the OAI-PMH server implementation.
     *
     * @param dso dspace object, e.g. an <code>Item</code>.
     * @return always true: any object's metadata can be rendered as DIM.
     */
    public boolean canDisseminate(DSpaceObject dso)
    {
        return true;
    }

    /**
     * Predicate: Does this disseminator prefer to return a list of Elements
     * rather than a single root Element?
     *
     * @return false: DIM output is wrapped in a single root element.
     */
    public boolean preferList()
    {
        return false;
    }

    /**
     * Execute crosswalk, returning List of XML elements, e.g. for embedding
     * in a METS document's <code>xmlData</code> field.  Never returns
     * <code>null</code>; when there are no results the list is empty.
     *
     * @param dso the DSpace Object whose metadata to export.
     * @return results of crosswalk as list of XML elements.
     *
     * @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
     * @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
     * @throws IOException I/O failure in services this calls
     * @throws SQLException Database failure in services this calls
     * @throws AuthorizeException current user not authorized for this operation.
     */
    public List<Element> disseminateList(DSpaceObject dso)
        throws CrosswalkException, IOException, SQLException,
               AuthorizeException
    {
        // Same content as disseminateElement(), minus the root wrapper.
        return disseminateElement(dso).getChildren();
    }

    /**
     * Execute crosswalk, returning one XML root element as a JDOM
     * <code>Element</code> object (typically the root of a document).
     *
     * @param dso the DSpace Object whose metadata to export.
     * @return root Element of the target metadata, never <code>null</code>
     *
     * @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
     * @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
     * @throws IOException I/O failure in services this calls
     * @throws SQLException Database failure in services this calls
     * @throws AuthorizeException current user not authorized for this operation.
     */
    public Element disseminateElement(DSpaceObject dso)
        throws CrosswalkException, IOException, SQLException,
               AuthorizeException
    {
        return XSLTDisseminationCrosswalk.createDIM(dso);
    }

    /**
     * Ingest a whole document: delegate to the list form with the root
     * element's children.
     */
    public void ingest(Context context, DSpaceObject dso, Element root)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        ingest(context, dso, root.getChildren());
    }

    /**
     * Ingest a list of metadata fields; they correspond directly to
     * Item.addMetadata() calls so they are simply executed.
     */
    public void ingest(Context context, DSpaceObject dso, List<Element> dimList)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        XSLTIngestionCrosswalk.ingestDIM(context, dso, dimList);
    }
}

View File

@@ -1,69 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.OutputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.license.CreativeCommons;
/**
 * Export the item's Creative Commons license, RDF form.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class CreativeCommonsRDFStreamDisseminationCrosswalk
    implements StreamDisseminationCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(CreativeCommonsRDFStreamDisseminationCrosswalk.class);

    /**
     * Predicate: true when the object is an Item carrying a CC RDF license
     * bitstream.  Any failure while probing is logged and reported as
     * "cannot disseminate" rather than propagated.
     */
    public boolean canDisseminate(Context context, DSpaceObject dso)
    {
        try
        {
            return dso.getType() == Constants.ITEM &&
                CreativeCommons.getLicenseRdfBitstream((Item)dso) != null;
        }
        catch (Exception e)
        {
            log.error("Failed getting CC license", e);
            return false;
        }
    }

    /**
     * Copy the CC RDF license bitstream to the output stream and close it.
     * Objects that are not Items, or Items with no CC license, produce no
     * output and leave the stream open (matching canDisseminate()).
     */
    public void disseminate(Context context, DSpaceObject dso, OutputStream out)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        if (dso.getType() == Constants.ITEM)
        {
            Bitstream cc = CreativeCommons.getLicenseRdfBitstream((Item)dso);
            if (cc != null)
            {
                // Close the retrieved InputStream when done: the original
                // code left it open, leaking the assetstore handle.
                java.io.InputStream in = cc.retrieve();
                try
                {
                    Utils.copy(in, out);
                }
                finally
                {
                    in.close();
                }
                out.close();
            }
        }
    }

    /** @return MIME type of the disseminated content. */
    public String getMIMEType()
    {
        return "text/xml";
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.OutputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.license.CreativeCommons;
/**
 * Export the item's Creative Commons license, RDF form.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class CreativeCommonsRDFStreamDisseminationCrosswalk
    implements StreamDisseminationCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(CreativeCommonsRDFStreamDisseminationCrosswalk.class);

    /**
     * Predicate: true when the object is an Item carrying a CC RDF license
     * bitstream.  Any failure while probing is logged and reported as
     * "cannot disseminate" rather than propagated.
     */
    public boolean canDisseminate(Context context, DSpaceObject dso)
    {
        try
        {
            return dso.getType() == Constants.ITEM &&
                CreativeCommons.getLicenseRdfBitstream((Item)dso) != null;
        }
        catch (Exception e)
        {
            log.error("Failed getting CC license", e);
            return false;
        }
    }

    /**
     * Copy the CC RDF license bitstream to the output stream and close it.
     * Objects that are not Items, or Items with no CC license, produce no
     * output and leave the stream open (matching canDisseminate()).
     */
    public void disseminate(Context context, DSpaceObject dso, OutputStream out)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        if (dso.getType() == Constants.ITEM)
        {
            Bitstream cc = CreativeCommons.getLicenseRdfBitstream((Item)dso);
            if (cc != null)
            {
                // Close the retrieved InputStream when done: the original
                // code left it open, leaking the assetstore handle.
                java.io.InputStream in = cc.retrieve();
                try
                {
                    Utils.copy(in, out);
                }
                finally
                {
                    in.close();
                }
                out.close();
            }
        }
    }

    /** @return MIME type of the disseminated content. */
    public String getMIMEType()
    {
        return "text/xml";
    }
}

View File

@@ -1,62 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.InputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.license.CreativeCommons;
/**
 * Ingest a Creative Commons license, RDF form.
 * <p>
 * Note that this is NOT needed when ingesting a DSpace AIP: the CC license
 * is stored as a Bitstream (or two) in a dedicated Bundle, so the normal
 * apparatus of ingesting the AIP restores that Bitstream with its proper
 * name and thus the presence of the CC license.
 * <p>
 * This crosswalk should only be used when ingesting other kinds of SIPs.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class CreativeCommonsRDFStreamIngestionCrosswalk
    implements StreamIngestionCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(CreativeCommonsRDFStreamIngestionCrosswalk.class);

    /**
     * Attach the stream's content to the object as its Creative Commons
     * license.  Objects that are not Items are silently ignored.
     */
    public void ingest(Context context, DSpaceObject dso, InputStream in, String MIMEType)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        // Guard clause: only Items can carry a Creative Commons license.
        if (dso.getType() != Constants.ITEM)
        {
            return;
        }
        if (log.isDebugEnabled())
        {
            log.debug("Reading a Creative Commons license, MIMEtype=" + MIMEType);
        }
        CreativeCommons.setLicense(context, (Item) dso, in, MIMEType);
    }

    /** @return MIME type accepted by this ingester. */
    public String getMIMEType()
    {
        return "text/rdf";
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.InputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.license.CreativeCommons;
/**
 * Ingest a Creative Commons license, RDF form.
 * <p>
 * Note that this is NOT needed when ingesting a DSpace AIP: the CC license
 * is stored as a Bitstream (or two) in a dedicated Bundle, so the normal
 * apparatus of ingesting the AIP restores that Bitstream with its proper
 * name and thus the presence of the CC license.
 * <p>
 * This crosswalk should only be used when ingesting other kinds of SIPs.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class CreativeCommonsRDFStreamIngestionCrosswalk
    implements StreamIngestionCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(CreativeCommonsRDFStreamIngestionCrosswalk.class);

    /**
     * Attach the stream's content to the object as its Creative Commons
     * license.  Objects that are not Items are silently ignored.
     */
    public void ingest(Context context, DSpaceObject dso, InputStream in, String MIMEType)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        // Guard clause: only Items can carry a Creative Commons license.
        if (dso.getType() != Constants.ITEM)
        {
            return;
        }
        if (log.isDebugEnabled())
        {
            log.debug("Reading a Creative Commons license, MIMEtype=" + MIMEType);
        }
        CreativeCommons.setLicense(context, (Item) dso, in, MIMEType);
    }

    /** @return MIME type accepted by this ingester. */
    public String getMIMEType()
    {
        return "text/rdf";
    }
}

View File

@@ -1,69 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.OutputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Bitstream;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.license.CreativeCommons;
/**
 * Export the object's Creative Commons license, text form.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class CreativeCommonsTextStreamDisseminationCrosswalk
    implements StreamDisseminationCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(CreativeCommonsTextStreamDisseminationCrosswalk.class);

    /**
     * Predicate: true when the object is an Item carrying a CC text license
     * bitstream.  Any failure while probing is logged and reported as
     * "cannot disseminate" rather than propagated.
     */
    public boolean canDisseminate(Context context, DSpaceObject dso)
    {
        try
        {
            return dso.getType() == Constants.ITEM &&
                CreativeCommons.getLicenseTextBitstream((Item)dso) != null;
        }
        catch (Exception e)
        {
            log.error("Failed getting CC license", e);
            return false;
        }
    }

    /**
     * Copy the CC text license bitstream to the output stream and close it.
     * Objects that are not Items, or Items with no CC license, produce no
     * output and leave the stream open (matching canDisseminate()).
     */
    public void disseminate(Context context, DSpaceObject dso, OutputStream out)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        if (dso.getType() == Constants.ITEM)
        {
            Bitstream cc = CreativeCommons.getLicenseTextBitstream((Item)dso);
            if (cc != null)
            {
                // Close the retrieved InputStream when done: the original
                // code left it open, leaking the assetstore handle.
                java.io.InputStream in = cc.retrieve();
                try
                {
                    Utils.copy(in, out);
                }
                finally
                {
                    in.close();
                }
                out.close();
            }
        }
    }

    /** @return MIME type of the disseminated content. */
    public String getMIMEType()
    {
        return "text/plain";
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.OutputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Bitstream;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.license.CreativeCommons;
/**
 * Export the object's Creative Commons license, text form.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class CreativeCommonsTextStreamDisseminationCrosswalk
    implements StreamDisseminationCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(CreativeCommonsTextStreamDisseminationCrosswalk.class);

    /**
     * Predicate: true when the object is an Item carrying a CC text license
     * bitstream.  Any failure while probing is logged and reported as
     * "cannot disseminate" rather than propagated.
     */
    public boolean canDisseminate(Context context, DSpaceObject dso)
    {
        try
        {
            return dso.getType() == Constants.ITEM &&
                CreativeCommons.getLicenseTextBitstream((Item)dso) != null;
        }
        catch (Exception e)
        {
            log.error("Failed getting CC license", e);
            return false;
        }
    }

    /**
     * Copy the CC text license bitstream to the output stream and close it.
     * Objects that are not Items, or Items with no CC license, produce no
     * output and leave the stream open (matching canDisseminate()).
     */
    public void disseminate(Context context, DSpaceObject dso, OutputStream out)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        if (dso.getType() == Constants.ITEM)
        {
            Bitstream cc = CreativeCommons.getLicenseTextBitstream((Item)dso);
            if (cc != null)
            {
                // Close the retrieved InputStream when done: the original
                // code left it open, leaking the assetstore handle.
                java.io.InputStream in = cc.retrieve();
                try
                {
                    Utils.copy(in, out);
                }
                finally
                {
                    in.close();
                }
                out.close();
            }
        }
    }

    /** @return MIME type of the disseminated content. */
    public String getMIMEType()
    {
        return "text/plain";
    }
}

View File

@@ -1,21 +1,21 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
/**
 * A single-method strategy for converting one string representation into
 * another, pluggable into crosswalks that need value translation.
 */
public interface IConverter
{
    /**
     * Get an alternative format for the input string. Useful examples are
     * conversion from a metadata language value in ISO-639-3 to ISO-639-1, etc.
     *
     * @param value
     *            the input string to convert
     * @return the converted string returned by the "conversion algorithm"
     */
    public String makeConversion(String value);
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
/**
 * A single-method strategy for converting one string representation into
 * another, pluggable into crosswalks that need value translation.
 */
public interface IConverter
{
    /**
     * Get an alternative format for the input string. Useful examples are
     * conversion from a metadata language value in ISO-639-3 to ISO-639-1, etc.
     *
     * @param value
     *            the input string to convert
     * @return the converted string returned by the "conversion algorithm"
     */
    public String makeConversion(String value);
}

View File

@@ -1,70 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.OutputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.content.packager.PackageUtils;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
/**
 * Export the object's DSpace deposit license.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class LicenseStreamDisseminationCrosswalk
    implements StreamDisseminationCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(LicenseStreamDisseminationCrosswalk.class);

    /**
     * Predicate: true when the object is an Item carrying a deposit license
     * bitstream.  Any failure while probing is logged and reported as
     * "cannot disseminate" rather than propagated.
     */
    public boolean canDisseminate(Context context, DSpaceObject dso)
    {
        try
        {
            return dso.getType() == Constants.ITEM &&
                PackageUtils.findDepositLicense(context, (Item)dso) != null;
        }
        catch (Exception e)
        {
            log.error("Failed getting Deposit license", e);
            return false;
        }
    }

    /**
     * Copy the deposit license bitstream to the output stream and close it.
     * Objects that are not Items, or Items with no deposit license, produce
     * no output and leave the stream open (matching canDisseminate()).
     */
    public void disseminate(Context context, DSpaceObject dso, OutputStream out)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        if (dso.getType() == Constants.ITEM)
        {
            Bitstream licenseBs = PackageUtils.findDepositLicense(context, (Item)dso);
            if (licenseBs != null)
            {
                // Close the retrieved InputStream when done: the original
                // code left it open, leaking the assetstore handle.
                java.io.InputStream in = licenseBs.retrieve();
                try
                {
                    Utils.copy(in, out);
                }
                finally
                {
                    in.close();
                }
                out.close();
            }
        }
    }

    /** @return MIME type of the disseminated content. */
    public String getMIMEType()
    {
        return "text/plain";
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.OutputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.content.packager.PackageUtils;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
/**
 * Export the object's DSpace deposit license.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class LicenseStreamDisseminationCrosswalk
    implements StreamDisseminationCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(LicenseStreamDisseminationCrosswalk.class);

    /**
     * Predicate: true when the object is an Item carrying a deposit license
     * bitstream.  Any failure while probing is logged and reported as
     * "cannot disseminate" rather than propagated.
     */
    public boolean canDisseminate(Context context, DSpaceObject dso)
    {
        try
        {
            return dso.getType() == Constants.ITEM &&
                PackageUtils.findDepositLicense(context, (Item)dso) != null;
        }
        catch (Exception e)
        {
            log.error("Failed getting Deposit license", e);
            return false;
        }
    }

    /**
     * Copy the deposit license bitstream to the output stream and close it.
     * Objects that are not Items, or Items with no deposit license, produce
     * no output and leave the stream open (matching canDisseminate()).
     */
    public void disseminate(Context context, DSpaceObject dso, OutputStream out)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        if (dso.getType() == Constants.ITEM)
        {
            Bitstream licenseBs = PackageUtils.findDepositLicense(context, (Item)dso);
            if (licenseBs != null)
            {
                // Close the retrieved InputStream when done: the original
                // code left it open, leaking the assetstore handle.
                java.io.InputStream in = licenseBs.retrieve();
                try
                {
                    Utils.copy(in, out);
                }
                finally
                {
                    in.close();
                }
                out.close();
            }
        }
    }

    /** @return MIME type of the disseminated content. */
    public String getMIMEType()
    {
        return "text/plain";
    }
}

View File

@@ -1,66 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.packager.PackageUtils;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
/**
 * Accept a DSpace deposit license.
 * <p>
 * Note that this is NOT needed when ingesting a DSpace AIP, since the
 * deposit license is stored as a Bitstream (or two) in a dedicated Bundle;
 * the normal apparatus of ingesting the AIP will restore that Bitstream
 * with its proper name and thus the presence of the deposit license.
 * <p>
 * This crosswalk should only be used when ingesting other kinds of SIPs.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class LicenseStreamIngestionCrosswalk
    implements StreamIngestionCrosswalk
{
    /** log4j logger */
    private static Logger log = Logger.getLogger(LicenseStreamIngestionCrosswalk.class);

    /**
     * Read the whole incoming stream into memory and register its contents
     * as the deposit license of the given Item. Non-Item objects are ignored.
     *
     * @param context  current DSpace context
     * @param dso      target object (only Items are acted upon)
     * @param in       stream containing the license text
     * @param MIMEType declared MIME type of the stream (used for logging only)
     */
    public void ingest(Context context, DSpaceObject dso, InputStream in, String MIMEType)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
    {
        // Only Items carry a deposit license; silently skip everything else.
        if (dso.getType() != Constants.ITEM)
        {
            return;
        }

        if (log.isDebugEnabled())
        {
            log.debug("Reading a DSpace Deposit license, MIMEtype=" + MIMEType);
        }

        // Buffer the stream, then store its text as the Item's license.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        Utils.copy(in, buffer);
        PackageUtils.addDepositLicense(context, buffer.toString(),
                                       (Item) dso, null);
    }

    /** @return MIME type of the content this crosswalk accepts */
    public String getMIMEType()
    {
        return "text/plain";
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.packager.PackageUtils;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Utils;
/**
 * Accept a DSpace deposit license.
 * <p>
 * Note that this is NOT needed when ingesting a DSpace AIP, since the
 * deposit license is stored as a Bitstream (or two) in a dedicated Bundle;
 * the normal apparatus of ingesting the AIP will restore that Bitstream
 * with its proper name and thus the presence of the deposit license.
 * <p>
 * This crosswalk should only be used when ingesting other kinds of SIPs.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class LicenseStreamIngestionCrosswalk
implements StreamIngestionCrosswalk
{
/** log4j logger */
private static Logger log = Logger.getLogger(LicenseStreamIngestionCrosswalk.class);
/**
 * Read the whole incoming stream into memory and register its contents
 * as the deposit license of the given Item. Non-Item objects are ignored.
 *
 * @param context  current DSpace context
 * @param dso      target object (only Items are acted upon)
 * @param in       stream containing the license text
 * @param MIMEType declared MIME type of the stream (used for logging only)
 */
public void ingest(Context context, DSpaceObject dso, InputStream in, String MIMEType)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
// Only Items carry a deposit license; everything else is silently skipped.
if (dso.getType() == Constants.ITEM)
{
if (log.isDebugEnabled())
{
log.debug("Reading a DSpace Deposit license, MIMEtype=" + MIMEType);
}
// Buffer the full stream before storing it as the Item's license.
// NOTE(review): baos.toString() decodes with the platform default
// charset — confirm callers always supply compatible encodings.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Utils.copy(in, baos);
PackageUtils.addDepositLicense(context, baos.toString(),
(Item)dso, null);
}
}
/** @return MIME type of the content this crosswalk accepts */
public String getMIMEType()
{
return "text/plain";
}
}

View File

@@ -1,43 +1,43 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.InputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
/**
 * A crosswalk to ignore and dispose of the ingested material.
 * <p>
 * Specify this crosswalk in the mapping of e.g. METS metadata field
 * types to crosswalks when you wish to ignore a redundant or unknown
 * type of metadata. For example, when ingesting a DSpace AIP with an
 * AIP ingester, it is best to ignore the rightsMD fields since they
 * are already going to be ingested as member bitstreams anyway.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class NullStreamIngestionCrosswalk
implements StreamIngestionCrosswalk
{
/**
 * Discard the incoming metadata entirely: the stream is closed without
 * being read and the target object is left untouched.
 */
public void ingest(Context context, DSpaceObject dso, InputStream in, String MIMEType)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
in.close();
}
/** @return nominal MIME type (content is never inspected) */
public String getMIMEType()
{
return "text/plain";
}
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.InputStream;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
/**
 * A crosswalk to ignore and dispose of the ingested material.
 * <p>
 * Specify this crosswalk in the mapping of e.g. METS metadata field
 * types to crosswalks when you wish to ignore a redundant or unknown
 * type of metadata. For example, when ingesting a DSpace AIP with an
 * AIP ingester, it is best to ignore the rightsMD fields since they
 * are already going to be ingested as member bitstreams anyway.
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 */
public class NullStreamIngestionCrosswalk
implements StreamIngestionCrosswalk
{
/**
 * Discard the incoming metadata entirely: the stream is closed without
 * being read and the target object is left untouched.
 */
public void ingest(Context context, DSpaceObject dso, InputStream in, String MIMEType)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
in.close();
}
/** @return nominal MIME type (content is never inspected) */
public String getMIMEType()
{
return "text/plain";
}
}

View File

@@ -1,343 +1,343 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.packager.PackageDisseminator;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageIngester;
import org.dspace.content.packager.PackageParameters;
import org.dspace.content.packager.RoleDisseminator;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;
/**
* Role Crosswalk
* <p>
* Translate between DSpace Group & EPeople definitions and a DSpace-specific
* XML export format (generated by the RoleDisseminator). This is primarily
* used for AIPs, but may be used by other Packagers as necessary.
* <p>
* This crosswalk allows you to export DSpace Groups & EPeople to this XML
* structured format. It also allows you to import an XML file of this format
* in order to restore DSpace Groups and EPeople defined within it.
* <p>
* This is just wrappers; the real work is done in RoleDisseminator and
* RoleIngester.
*
* @author mwood
* @author Tim Donohue
* @see org.dspace.content.packager.RoleDisseminator
* @see org.dspace.content.packager.RoleIngester
* @see AbstractPackagerWrappingCrosswalk
* @see IngestionCrosswalk
* @see DisseminationCrosswalk
*/
public class RoleCrosswalk
extends AbstractPackagerWrappingCrosswalk
implements IngestionCrosswalk, DisseminationCrosswalk
{
// Plugin Name of DSPACE-ROLES packager to use for ingest/dissemination
// (Whatever plugin is defined with this name in 'dspace.cfg' will be used by this Crosswalk)
private static final String ROLE_PACKAGER_PLUGIN = "DSPACE-ROLES";
// ---- Dissemination Methods -----------
/**
* Get XML namespaces of the elements this crosswalk may return.
* Returns the XML namespaces (as JDOM objects) of the root element.
*
* @return array of namespaces, which may be empty.
*/
@Override
public Namespace[] getNamespaces()
{
Namespace result[] = new Namespace[1];
result[0] = RoleDisseminator.DSROLES_NS;
return result;
}
/**
* Get the XML Schema location(s) of the target metadata format.
* Returns the string value of the <code>xsi:schemaLocation</code>
* attribute that should be applied to the generated XML.
* <p>
* It may return the empty string if no schema is known, but crosswalk
* authors are strongly encouraged to implement this call so their output
* XML can be validated correctly.
* @return SchemaLocation string, including URI namespace, followed by
* whitespace and URI of XML schema document, or empty string if unknown.
*/
@Override
public String getSchemaLocation()
{
return "";
}
/**
* Predicate: Can this disseminator crosswalk the given object.
*
* @param dso dspace object, e.g. an <code>Item</code>.
* @return true when disseminator is capable of producing metadata.
*/
@Override
public boolean canDisseminate(DSpaceObject dso)
{
//We can only disseminate SITE, COMMUNITY or COLLECTION objects,
//as Groups are only associated with those objects.
return (dso.getType() == Constants.SITE ||
dso.getType() == Constants.COMMUNITY ||
dso.getType() == Constants.COLLECTION);
}
/**
* Predicate: Does this disseminator prefer to return a list of Elements,
* rather than a single root Element?
*
* @return true when disseminator prefers you call disseminateList().
*/
@Override
public boolean preferList()
{
//We prefer disseminators call 'disseminateElement()' instead of 'disseminateList()'
return false;
}
/**
* Execute crosswalk, returning List of XML elements.
* Returns a <code>List</code> of JDOM <code>Element</code> objects representing
* the XML produced by the crosswalk. This is typically called when
* a list of fields is desired, e.g. for embedding in a METS document
* <code>xmlData</code> field.
* <p>
* When there are no results, an
* empty list is returned, but never <code>null</code>.
*
* @param dso the DSpace Object whose metadata to export.
* @return results of crosswalk as list of XML elements.
*
* @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
* @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
* @throws IOException I/O failure in services this calls
* @throws SQLException Database failure in services this calls
* @throws AuthorizeException current user not authorized for this operation.
*/
@Override
public List<Element> disseminateList(DSpaceObject dso)
throws CrosswalkException, IOException, SQLException,
AuthorizeException
{
Element dim = disseminateElement(dso);
return dim.getChildren();
}
/**
* Execute crosswalk, returning one XML root element as
* a JDOM <code>Element</code> object.
* This is typically the root element of a document.
* <p>
*
* @param dso the DSpace Object whose metadata to export.
* @return root Element of the target metadata, never <code>null</code>
*
* @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
* @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
* @throws IOException I/O failure in services this calls
* @throws SQLException Database failure in services this calls
* @throws AuthorizeException current user not authorized for this operation.
*/
@Override
public Element disseminateElement(DSpaceObject dso)
throws CrosswalkException, IOException, SQLException,
AuthorizeException
{
try
{
PackageDisseminator dip = (PackageDisseminator)
PluginManager.getNamedPlugin(PackageDisseminator.class, ROLE_PACKAGER_PLUGIN);
if (dip == null)
{
throw new CrosswalkInternalException("Cannot find a PackageDisseminator plugin named " + ROLE_PACKAGER_PLUGIN);
}
// Create a temporary file to disseminate into
String tempDirectory = ConfigurationManager.getProperty("upload.temp.dir");
File tempFile = File.createTempFile("RoleCrosswalkDisseminate" + dso.hashCode(), null, new File(tempDirectory));
tempFile.deleteOnExit();
// Initialize our packaging parameters
PackageParameters pparams;
if(this.getPackagingParameters()!=null)
{
pparams = this.getPackagingParameters();
}
else
{
pparams = new PackageParameters();
}
//actually disseminate to our temp file.
Context context = new Context();
dip.disseminate(context, dso, pparams, tempFile);
context.complete();
// if we ended up with a Zero-length output file,
// this means dissemination was successful but had no results
if(tempFile.exists() && tempFile.length()==0)
{
return null;
}
try
{
//Try to parse our XML results (which were disseminated by the Packager)
SAXBuilder builder = new SAXBuilder();
Document xmlDocument = builder.build(tempFile);
//If XML parsed successfully, return root element of doc
if(xmlDocument!=null && xmlDocument.hasRootElement())
{
return xmlDocument.getRootElement();
}
else
{
return null;
}
}
catch (JDOMException je)
{
throw new MetadataValidationException("Error parsing Roles XML (see wrapped error message for more details) ",je);
}
}
catch (PackageException pe)
{
throw new CrosswalkInternalException("Failed to export Roles via packager (see wrapped error message for more details) ",pe);
}
}
// ---- Ingestion Methods -----------
/**
* Ingest a List of XML elements
*
* @param context
* @param dso
* @param metadata
* @throws CrosswalkException
* @throws IOException
* @throws SQLException
* @throws AuthorizeException
*/
@Override
public void ingest(Context context, DSpaceObject dso, List<Element> metadata)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
if(!metadata.isEmpty())
{
ingest(context, dso, ((Element) metadata.get(0)).getParentElement());
}
}
/**
* Ingest a whole XML document, starting at specified root.
* <P>
* This essentially just wraps a call to the configured Role PackageIngester.
*
* @param context
* @param dso
* @param root
* @throws CrosswalkException
* @throws IOException
* @throws SQLException
* @throws AuthorizeException
*/
@Override
public void ingest(Context context, DSpaceObject dso, Element root)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
if (dso.getType() != Constants.SITE &&
dso.getType() != Constants.COMMUNITY &&
dso.getType() != Constants.COLLECTION)
{
throw new CrosswalkObjectNotSupported("Role crosswalk only valid for Site, Community or Collection");
}
//locate our "DSPACE-ROLES" PackageIngester plugin
PackageIngester sip = (PackageIngester)
PluginManager.getNamedPlugin(PackageIngester.class, ROLE_PACKAGER_PLUGIN);
if (sip == null)
{
throw new CrosswalkInternalException("Cannot find a PackageIngester plugin named " + ROLE_PACKAGER_PLUGIN);
}
// Initialize our packaging parameters
PackageParameters pparams;
if(this.getPackagingParameters()!=null)
{
pparams = this.getPackagingParameters();
}
else
{
pparams = new PackageParameters();
}
// Initialize our license info
String license = null;
if(this.getIngestionLicense()!=null)
{
license = this.getIngestionLicense();
}
// Create a temporary file to ingest from
String tempDirectory = ConfigurationManager.getProperty("upload.temp.dir");
File tempFile = File.createTempFile("RoleCrosswalkIngest" + dso.hashCode(), null, new File(tempDirectory));
tempFile.deleteOnExit();
FileOutputStream fileOutStream = null;
try
{
fileOutStream = new FileOutputStream(tempFile);
XMLOutputter writer = new XMLOutputter();
writer.output(root, fileOutStream);
}
finally
{
if (fileOutStream != null)
{
fileOutStream.close();
}
}
//Actually call the ingester
try
{
sip.ingest(context, dso, tempFile, pparams, license);
}
catch (PackageException e)
{
throw new CrosswalkInternalException(e);
}
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.packager.PackageDisseminator;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageIngester;
import org.dspace.content.packager.PackageParameters;
import org.dspace.content.packager.RoleDisseminator;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;
/**
* Role Crosswalk
* <p>
* Translate between DSpace Group & EPeople definitions and a DSpace-specific
* XML export format (generated by the RoleDisseminator). This is primarily
* used for AIPs, but may be used by other Packagers as necessary.
* <p>
* This crosswalk allows you to export DSpace Groups & EPeople to this XML
* structured format. It also allows you to import an XML file of this format
* in order to restore DSpace Groups and EPeople defined within it.
* <p>
* This is just wrappers; the real work is done in RoleDisseminator and
* RoleIngester.
*
* @author mwood
* @author Tim Donohue
* @see org.dspace.content.packager.RoleDisseminator
* @see org.dspace.content.packager.RoleIngester
* @see AbstractPackagerWrappingCrosswalk
* @see IngestionCrosswalk
* @see DisseminationCrosswalk
*/
public class RoleCrosswalk
extends AbstractPackagerWrappingCrosswalk
implements IngestionCrosswalk, DisseminationCrosswalk
{
// Plugin Name of DSPACE-ROLES packager to use for ingest/dissemination
// (Whatever plugin is defined with this name in 'dspace.cfg' will be used by this Crosswalk)
private static final String ROLE_PACKAGER_PLUGIN = "DSPACE-ROLES";
// ---- Dissemination Methods -----------
/**
* Get XML namespaces of the elements this crosswalk may return.
* Returns the XML namespaces (as JDOM objects) of the root element.
*
* @return array of namespaces, which may be empty.
*/
@Override
public Namespace[] getNamespaces()
{
Namespace result[] = new Namespace[1];
result[0] = RoleDisseminator.DSROLES_NS;
return result;
}
/**
* Get the XML Schema location(s) of the target metadata format.
* Returns the string value of the <code>xsi:schemaLocation</code>
* attribute that should be applied to the generated XML.
* <p>
* It may return the empty string if no schema is known, but crosswalk
* authors are strongly encouraged to implement this call so their output
* XML can be validated correctly.
* @return SchemaLocation string, including URI namespace, followed by
* whitespace and URI of XML schema document, or empty string if unknown.
*/
@Override
public String getSchemaLocation()
{
// No schema is published for the DSPACE-ROLES format.
return "";
}
/**
* Predicate: Can this disseminator crosswalk the given object.
*
* @param dso dspace object, e.g. an <code>Item</code>.
* @return true when disseminator is capable of producing metadata.
*/
@Override
public boolean canDisseminate(DSpaceObject dso)
{
//We can only disseminate SITE, COMMUNITY or COLLECTION objects,
//as Groups are only associated with those objects.
return (dso.getType() == Constants.SITE ||
dso.getType() == Constants.COMMUNITY ||
dso.getType() == Constants.COLLECTION);
}
/**
* Predicate: Does this disseminator prefer to return a list of Elements,
* rather than a single root Element?
*
* @return true when disseminator prefers you call disseminateList().
*/
@Override
public boolean preferList()
{
//We prefer disseminators call 'disseminateElement()' instead of 'disseminateList()'
return false;
}
/**
* Execute crosswalk, returning List of XML elements.
* Returns a <code>List</code> of JDOM <code>Element</code> objects representing
* the XML produced by the crosswalk. This is typically called when
* a list of fields is desired, e.g. for embedding in a METS document
* <code>xmlData</code> field.
* <p>
* When there are no results, an
* empty list is returned, but never <code>null</code>.
*
* @param dso the DSpace Object whose metadata to export.
* @return results of crosswalk as list of XML elements.
*
* @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
* @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
* @throws IOException I/O failure in services this calls
* @throws SQLException Database failure in services this calls
* @throws AuthorizeException current user not authorized for this operation.
*/
@Override
public List<Element> disseminateList(DSpaceObject dso)
throws CrosswalkException, IOException, SQLException,
AuthorizeException
{
// Delegates to disseminateElement() and unwraps the root's children.
// NOTE(review): disseminateElement() can return null (empty result),
// which would NPE here — confirm callers guarantee non-empty output.
Element dim = disseminateElement(dso);
return dim.getChildren();
}
/**
* Execute crosswalk, returning one XML root element as
* a JDOM <code>Element</code> object.
* This is typically the root element of a document.
* <p>
*
* @param dso the DSpace Object whose metadata to export.
* @return root Element of the target metadata, never <code>null</code>
*
* @throws CrosswalkInternalException (<code>CrosswalkException</code>) failure of the crosswalk itself.
* @throws CrosswalkObjectNotSupported (<code>CrosswalkException</code>) Cannot crosswalk this kind of DSpace object.
* @throws IOException I/O failure in services this calls
* @throws SQLException Database failure in services this calls
* @throws AuthorizeException current user not authorized for this operation.
*/
@Override
public Element disseminateElement(DSpaceObject dso)
throws CrosswalkException, IOException, SQLException,
AuthorizeException
{
try
{
PackageDisseminator dip = (PackageDisseminator)
PluginManager.getNamedPlugin(PackageDisseminator.class, ROLE_PACKAGER_PLUGIN);
if (dip == null)
{
throw new CrosswalkInternalException("Cannot find a PackageDisseminator plugin named " + ROLE_PACKAGER_PLUGIN);
}
// Create a temporary file to disseminate into
String tempDirectory = ConfigurationManager.getProperty("upload.temp.dir");
File tempFile = File.createTempFile("RoleCrosswalkDisseminate" + dso.hashCode(), null, new File(tempDirectory));
// NOTE(review): deleteOnExit() defers cleanup to JVM shutdown; in a
// long-running server these temp files accumulate — consider deleting
// the file once parsing below is done.
tempFile.deleteOnExit();
// Initialize our packaging parameters
PackageParameters pparams;
if(this.getPackagingParameters()!=null)
{
pparams = this.getPackagingParameters();
}
else
{
pparams = new PackageParameters();
}
//actually disseminate to our temp file.
// NOTE(review): if disseminate() throws, this Context is never
// completed or aborted — possible connection leak; confirm.
Context context = new Context();
dip.disseminate(context, dso, pparams, tempFile);
context.complete();
// if we ended up with a Zero-length output file,
// this means dissemination was successful but had no results
if(tempFile.exists() && tempFile.length()==0)
{
return null;
}
try
{
//Try to parse our XML results (which were disseminated by the Packager)
SAXBuilder builder = new SAXBuilder();
Document xmlDocument = builder.build(tempFile);
//If XML parsed successfully, return root element of doc
if(xmlDocument!=null && xmlDocument.hasRootElement())
{
return xmlDocument.getRootElement();
}
else
{
return null;
}
}
catch (JDOMException je)
{
throw new MetadataValidationException("Error parsing Roles XML (see wrapped error message for more details) ",je);
}
}
catch (PackageException pe)
{
throw new CrosswalkInternalException("Failed to export Roles via packager (see wrapped error message for more details) ",pe);
}
}
// ---- Ingestion Methods -----------
/**
* Ingest a List of XML elements
*
* @param context
* @param dso
* @param metadata
* @throws CrosswalkException
* @throws IOException
* @throws SQLException
* @throws AuthorizeException
*/
@Override
public void ingest(Context context, DSpaceObject dso, List<Element> metadata)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
// Only the parent of the FIRST element is ingested; this assumes all
// supplied elements share a single parent — confirm with callers.
if(!metadata.isEmpty())
{
ingest(context, dso, ((Element) metadata.get(0)).getParentElement());
}
}
/**
* Ingest a whole XML document, starting at specified root.
* <P>
* This essentially just wraps a call to the configured Role PackageIngester.
*
* @param context
* @param dso
* @param root
* @throws CrosswalkException
* @throws IOException
* @throws SQLException
* @throws AuthorizeException
*/
@Override
public void ingest(Context context, DSpaceObject dso, Element root)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
if (dso.getType() != Constants.SITE &&
dso.getType() != Constants.COMMUNITY &&
dso.getType() != Constants.COLLECTION)
{
throw new CrosswalkObjectNotSupported("Role crosswalk only valid for Site, Community or Collection");
}
//locate our "DSPACE-ROLES" PackageIngester plugin
PackageIngester sip = (PackageIngester)
PluginManager.getNamedPlugin(PackageIngester.class, ROLE_PACKAGER_PLUGIN);
if (sip == null)
{
throw new CrosswalkInternalException("Cannot find a PackageIngester plugin named " + ROLE_PACKAGER_PLUGIN);
}
// Initialize our packaging parameters
PackageParameters pparams;
if(this.getPackagingParameters()!=null)
{
pparams = this.getPackagingParameters();
}
else
{
pparams = new PackageParameters();
}
// Initialize our license info
String license = null;
if(this.getIngestionLicense()!=null)
{
license = this.getIngestionLicense();
}
// Create a temporary file to ingest from
// The XML tree is serialized to disk because the PackageIngester API
// consumes a File, not a DOM.
String tempDirectory = ConfigurationManager.getProperty("upload.temp.dir");
File tempFile = File.createTempFile("RoleCrosswalkIngest" + dso.hashCode(), null, new File(tempDirectory));
tempFile.deleteOnExit();
FileOutputStream fileOutStream = null;
try
{
fileOutStream = new FileOutputStream(tempFile);
XMLOutputter writer = new XMLOutputter();
writer.output(root, fileOutStream);
}
finally
{
if (fileOutStream != null)
{
fileOutStream.close();
}
}
//Actually call the ingester
try
{
sip.ingest(context, dso, tempFile, pparams, license);
}
catch (PackageException e)
{
throw new CrosswalkInternalException(e);
}
}
}

View File

@@ -1,51 +1,51 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formattable;
import java.util.Formatter;
import org.dspace.core.PluginManager;
/**
 * Wrapper class to make formattable any argument used in the license template.
 * The formatter behavior is delegated to a specific class on "type" basis
 * using the PluginManager.
 *
 * @see Formattable
 * @see LicenseArgumentFormatter
 * @author bollini
 *
 */
public class FormattableArgument implements Formattable
{
    /** Alias naming the kind of wrapped object (e.g. "item", "collection"). */
    private String type;

    /** The actual value to be rendered into the license template. */
    private Object object;

    /**
     * Wrap a value so it can be handed to a {@link java.util.Formatter}.
     *
     * @param type   alias used to look up the LicenseArgumentFormatter plugin
     * @param object the value to format (a null is treated by formatter
     *               plugins as a request for sample data)
     */
    public FormattableArgument(String type, Object object)
    {
        this.type = type;
        this.object = object;
    }

    /**
     * Delegate formatting to the LicenseArgumentFormatter plugin registered
     * for this argument's type; fall back to the value's string form when
     * no plugin is configured.
     *
     * @param formatter the formatter receiving the output
     * @param flags     formatter flags
     * @param width     minimum field width
     * @param precision maximum characters (ignored by the fallback)
     */
    public void formatTo(Formatter formatter, int flags, int width,
            int precision)
    {
        LicenseArgumentFormatter laf = (LicenseArgumentFormatter) PluginManager
                .getNamedPlugin(LicenseArgumentFormatter.class, type);
        if (laf != null)
        {
            laf.formatTo(formatter, flags, width, object, type);
        }
        else
        {
            // Pass the value as an ARGUMENT, not as the format string:
            // formatter.format(object.toString()) would interpret any '%'
            // in the value as a conversion and throw. "%s" also tolerates
            // a null object (rendered as "null") instead of NPE-ing.
            formatter.format("%s", object);
        }
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formattable;
import java.util.Formatter;
import org.dspace.core.PluginManager;
/**
 * Wrapper class to make formattable any argument used in the license template.
 * The formatter behavior is delegated to a specific class on "type" basis
 * using the PluginManager
 *
 * @see Formattable
 * @see LicenseArgumentFormatter
 * @author bollini
 *
 */
public class FormattableArgument implements Formattable
{
// Alias naming the kind of wrapped object (e.g. "item", "collection").
private String type;
// The actual value to be rendered into the license template.
private Object object;
/**
 * Wrap a value so it can be handed to a {@link java.util.Formatter}.
 *
 * @param type   alias used to look up the LicenseArgumentFormatter plugin
 * @param object the value to format
 */
public FormattableArgument(String type, Object object)
{
this.type = type;
this.object = object;
}
/**
 * Delegate formatting to the LicenseArgumentFormatter plugin registered
 * for this argument's type; fall back to the value's string form when no
 * plugin is configured.
 */
public void formatTo(Formatter formatter, int flags, int width,
int precision)
{
LicenseArgumentFormatter laf = (LicenseArgumentFormatter) PluginManager
.getNamedPlugin(LicenseArgumentFormatter.class, type);
if (laf != null)
{
laf.formatTo(formatter, flags, width, object, type);
}
else
{
// NOTE(review): the string is passed as the FORMAT pattern, so a '%'
// inside the value would be read as a conversion and throw; also NPEs
// on a null object. Consider formatter.format("%s", object).
formatter.format(object.toString());
}
}
}

View File

@@ -1,37 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formatter;
/**
 * Plugin interface used to render one argument of the license template into
 * a {@link java.util.Formatter}, selected per argument type by the
 * PluginManager.
 *
 * @see FormattableArgument
 */
public interface LicenseArgumentFormatter
{
/**
 * Format the object following the <code>java.util.Formatter</code> rules.
 * The object type is expected to be known to the implementer, who is free
 * to assume it is safe to cast as appropriate. If a <code>null</code>
 * object is supplied, the implementer is expected to behave as if
 * "sample data" was requested.
 *
 * @see Formatter
 * @param formatter
 *            the current formatter that needs to process the object
 * @param flags
 *            the flags option for the formatter
 * @param width
 *            the width option for the formatter
 * @param object
 *            the object to be formatted
 * @param type
 *            the type of the object (this is an alias not the class name! -
 *            i.e. item, collection, eperson, etc.)
 */
void formatTo(Formatter formatter, int flags, int width, Object object,
String type);
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formatter;
/**
 * Plugin interface used to render one argument of the license template into
 * a {@link java.util.Formatter}, selected per argument type by the
 * PluginManager.
 *
 * @see FormattableArgument
 */
public interface LicenseArgumentFormatter
{
/**
 * Format the object following the <code>java.util.Formatter</code> rules.
 * The object type is expected to be known to the implementer, who is free
 * to assume it is safe to cast as appropriate. If a <code>null</code>
 * object is supplied, the implementer is expected to behave as if
 * "sample data" was requested.
 *
 * @see Formatter
 * @param formatter
 *            the current formatter that needs to process the object
 * @param flags
 *            the flags option for the formatter
 * @param width
 *            the width option for the formatter
 * @param object
 *            the object to be formatted
 * @param type
 *            the type of the object (this is an alias not the class name! -
 *            i.e. item, collection, eperson, etc.)
 */
void formatTo(Formatter formatter, int flags, int width, Object object,
String type);
}

View File

@@ -1,45 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formatter;
import org.dspace.content.DSpaceObject;
/**
 * This is a simple implementation of the LicenseArgumentFormatter for a
 * DSpaceObject. The formatter options width/precision are not taken into
 * account.
 *
 * @author bollini
 *
 */
public class SimpleDSpaceObjectLicenseFormatter implements
        LicenseArgumentFormatter
{
    /**
     * Write the DSpaceObject's name to the formatter, or a "sample &lt;type&gt;"
     * placeholder when no object is supplied. A null name is rendered as an
     * empty string.
     *
     * @param formatter the formatter receiving the output
     * @param flags     formatter flags (ignored)
     * @param width     minimum field width (ignored)
     * @param object    a DSpaceObject, or null to request sample data
     * @param type      alias of the object's kind (e.g. "item", "collection")
     */
    public void formatTo(Formatter formatter, int flags, int width,
            Object object, String type)
    {
        if (object == null)
        {
            // Use "%s" so a '%' inside the type alias cannot be
            // misinterpreted as a format conversion.
            formatter.format("sample %s", type);
        }
        else
        {
            DSpaceObject dso = (DSpaceObject) object;
            String name = dso.getName();
            if (name != null)
            {
                // Pass the name as an ARGUMENT, not as the format string:
                // formatter.format(name) would throw on names containing '%'.
                formatter.format("%s", name);
            }
            else
            {
                formatter.format("");
            }
        }
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.license;
import java.util.Formatter;
import org.dspace.content.DSpaceObject;
/**
 * This is a simple implementation of the LicenseArgumentFormatter for a
 * DSpaceObject. The formatter options width/precision are not taken into
 * account.
 *
 * @author bollini
 */
public class SimpleDSpaceObjectLicenseFormatter implements
        LicenseArgumentFormatter
{
    /**
     * Write a textual representation of the supplied DSpaceObject to the
     * formatter. A null object yields the placeholder "sample &lt;type&gt;";
     * otherwise the object's name is written (the empty string when the name
     * is null).
     *
     * @param formatter the formatter to write to
     * @param flags formatter flags (ignored)
     * @param width formatter width (ignored)
     * @param object the DSpaceObject to format, or null for sample data
     * @param type alias of the object type (e.g. item, collection)
     */
    public void formatTo(Formatter formatter, int flags, int width,
            Object object, String type)
    {
        if (object == null)
        {
            // Use an explicit "%s" conversion so a '%' character in the type
            // alias cannot be misread as a format specifier (which would throw
            // UnknownFormatConversionException from Formatter.format()).
            formatter.format("%s", "sample " + type);
        }
        else
        {
            DSpaceObject dso = (DSpaceObject) object;
            String name = dso.getName();
            if (name == null)
            {
                name = "";
            }
            // "%s" likewise guards against '%' characters inside object names.
            formatter.format("%s", name);
        }
    }
}

View File

@@ -1,191 +1,191 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
 * An abstract implementation of a DSpace Package Disseminator, which
 * implements a few helper/utility methods that most (all?) PackageDisseminators
 * may find useful.
 * <P>
 * First, implements recursive functionality in the disseminateAll()
 * method of the PackageIngester interface. This method is setup to
 * recursively call the disseminate() method.
 * <P>
 * All Package disseminators should either extend this abstract class
 * or implement <code>PackageDisseminator</code> to better suit their needs.
 *
 * @author Tim Donohue
 * @see PackageDisseminator
 */
public abstract class AbstractPackageDisseminator
        implements PackageDisseminator
{
    /** List of all successfully disseminated package files */
    private final List<File> packageFileList = new ArrayList<File>();

    /**
     * Recursively export one or more DSpace Objects as a series of packages.
     * This method will export the given DSpace Object as well as all referenced
     * DSpaceObjects (e.g. child objects) into a series of packages. The
     * initial object is exported to the location specified by pkgFile.
     * All other packages are exported to the same directory location.
     * <p>
     * Package is any serialized representation of the item, at the discretion
     * of the implementing class. It does not have to include content bitstreams.
     * <br>
     * Use the <code>params</code> parameter list to adjust the way the
     * package is made, e.g. including a "<code>metadataOnly</code>"
     * parameter might make the package a bare manifest in XML
     * instead of a Zip file including manifest and contents.
     * <br>
     * Throws an exception if the initial object is not acceptable or there is
     * a failure creating the package.
     *
     * @param context DSpace context.
     * @param dso initial DSpace object
     * @param params Properties-style list of options specific to this packager
     * @param pkgFile File where initial package should be written. All other
     *          packages will be written to the same directory as this File.
     * @return List of all package Files which were successfully disseminated
     * @throws PackageValidationException if package cannot be created or there is
     *          a fatal error in creating it.
     */
    @Override
    public List<File> disseminateAll(Context context, DSpaceObject dso,
                     PackageParameters params, File pkgFile)
        throws PackageException, CrosswalkException,
               AuthorizeException, SQLException, IOException
    {
        // If unset, make sure the Parameters specify this is a recursive dissemination
        if (!params.recursiveModeEnabled())
        {
            params.setRecursiveModeEnabled(true);
        }

        // Try to disseminate the first object using the provided PackageDisseminator
        disseminate(context, dso, params, pkgFile);

        // Check whether the package was actually written out
        if (pkgFile.exists())
        {
            // Add to list of successfully disseminated packages
            addToPackageList(pkgFile);

            // We can only recursively disseminate non-Items
            // (NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
            if (dso.getType() != Constants.ITEM)
            {
                // Determine where the first package was disseminated to, as all
                // others will be written to the same directory
                String pkgDirectory = pkgFile.getCanonicalFile().getParent();
                if (!pkgDirectory.endsWith(File.separator))
                {
                    pkgDirectory += File.separator;
                }
                String fileExtension = PackageUtils.getFileExtension(pkgFile.getName());

                // Recursively disseminate content, based on object type
                switch (dso.getType())
                {
                    case Constants.COLLECTION :
                        // Also find all Items in this Collection and disseminate
                        Collection collection = (Collection) dso;
                        ItemIterator iterator = collection.getItems();
                        while (iterator.hasNext())
                        {
                            Item item = iterator.next();
                            // Disseminate all items (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(item, fileExtension);
                            disseminateAll(context, item, params, new File(childFileName));
                        }
                        break;
                    case Constants.COMMUNITY :
                        // Also find all SubCommunities in this Community and disseminate
                        Community community = (Community) dso;
                        for (Community subcommunity : community.getSubcommunities())
                        {
                            // Disseminate all sub-communities (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(subcommunity, fileExtension);
                            disseminateAll(context, subcommunity, params, new File(childFileName));
                        }
                        // Also find all Collections in this Community and disseminate
                        for (Collection childCollection : community.getCollections())
                        {
                            // Disseminate all collections (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(childCollection, fileExtension);
                            disseminateAll(context, childCollection, params, new File(childFileName));
                        }
                        break;
                    case Constants.SITE :
                        // Also find all top-level Communities and disseminate
                        for (Community topCommunity : Community.findAllTop(context))
                        {
                            // Disseminate all top-level communities (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(topCommunity, fileExtension);
                            disseminateAll(context, topCommunity, params, new File(childFileName));
                        }
                        break;
                }//end switch
            }//end if not an Item
        }//end if pkgFile exists

        // Return list of all successfully disseminated packages
        return getPackageList();
    }

    /**
     * Add a File to the list of successfully disseminated package files.
     * Each file is only recorded once.
     *
     * @param f the package File to record
     */
    protected void addToPackageList(File f)
    {
        // Only record each disseminated package once
        if (!packageFileList.contains(f))
        {
            packageFileList.add(f);
        }
    }

    /**
     * Return List of all package Files which have been disseminated by
     * this instance of the Disseminator.
     * <P>
     * This list can be useful in reporting back to the user what content has
     * been disseminated as packages. It's used by disseminateAll() to report
     * what packages were created.
     *
     * @return List of Files which correspond to the disseminated packages
     */
    protected List<File> getPackageList()
    {
        return packageFileList;
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
 * An abstract implementation of a DSpace Package Disseminator, which
 * implements a few helper/utility methods that most (all?) PackageDisseminators
 * may find useful.
 * <P>
 * First, implements recursive functionality in the disseminateAll()
 * method of the PackageIngester interface. This method is setup to
 * recursively call the disseminate() method.
 * <P>
 * All Package disseminators should either extend this abstract class
 * or implement <code>PackageDisseminator</code> to better suit their needs.
 *
 * @author Tim Donohue
 * @see PackageDisseminator
 */
public abstract class AbstractPackageDisseminator
        implements PackageDisseminator
{
    /** List of all successfully disseminated package files */
    private final List<File> packageFileList = new ArrayList<File>();

    /**
     * Recursively export one or more DSpace Objects as a series of packages.
     * This method will export the given DSpace Object as well as all referenced
     * DSpaceObjects (e.g. child objects) into a series of packages. The
     * initial object is exported to the location specified by pkgFile.
     * All other packages are exported to the same directory location.
     * <p>
     * Package is any serialized representation of the item, at the discretion
     * of the implementing class. It does not have to include content bitstreams.
     * <br>
     * Use the <code>params</code> parameter list to adjust the way the
     * package is made, e.g. including a "<code>metadataOnly</code>"
     * parameter might make the package a bare manifest in XML
     * instead of a Zip file including manifest and contents.
     * <br>
     * Throws an exception if the initial object is not acceptable or there is
     * a failure creating the package.
     *
     * @param context DSpace context.
     * @param dso initial DSpace object
     * @param params Properties-style list of options specific to this packager
     * @param pkgFile File where initial package should be written. All other
     *          packages will be written to the same directory as this File.
     * @return List of all package Files which were successfully disseminated
     * @throws PackageValidationException if package cannot be created or there is
     *          a fatal error in creating it.
     */
    @Override
    public List<File> disseminateAll(Context context, DSpaceObject dso,
                     PackageParameters params, File pkgFile)
        throws PackageException, CrosswalkException,
               AuthorizeException, SQLException, IOException
    {
        // If unset, make sure the Parameters specify this is a recursive dissemination
        if (!params.recursiveModeEnabled())
        {
            params.setRecursiveModeEnabled(true);
        }

        // Try to disseminate the first object using the provided PackageDisseminator
        disseminate(context, dso, params, pkgFile);

        // Check whether the package was actually written out
        if (pkgFile.exists())
        {
            // Add to list of successfully disseminated packages
            addToPackageList(pkgFile);

            // We can only recursively disseminate non-Items
            // (NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
            if (dso.getType() != Constants.ITEM)
            {
                // Determine where the first package was disseminated to, as all
                // others will be written to the same directory
                String pkgDirectory = pkgFile.getCanonicalFile().getParent();
                if (!pkgDirectory.endsWith(File.separator))
                {
                    pkgDirectory += File.separator;
                }
                String fileExtension = PackageUtils.getFileExtension(pkgFile.getName());

                // Recursively disseminate content, based on object type
                switch (dso.getType())
                {
                    case Constants.COLLECTION :
                        // Also find all Items in this Collection and disseminate
                        Collection collection = (Collection) dso;
                        ItemIterator iterator = collection.getItems();
                        while (iterator.hasNext())
                        {
                            Item item = iterator.next();
                            // Disseminate all items (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(item, fileExtension);
                            disseminateAll(context, item, params, new File(childFileName));
                        }
                        break;
                    case Constants.COMMUNITY :
                        // Also find all SubCommunities in this Community and disseminate
                        Community community = (Community) dso;
                        for (Community subcommunity : community.getSubcommunities())
                        {
                            // Disseminate all sub-communities (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(subcommunity, fileExtension);
                            disseminateAll(context, subcommunity, params, new File(childFileName));
                        }
                        // Also find all Collections in this Community and disseminate
                        for (Collection childCollection : community.getCollections())
                        {
                            // Disseminate all collections (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(childCollection, fileExtension);
                            disseminateAll(context, childCollection, params, new File(childFileName));
                        }
                        break;
                    case Constants.SITE :
                        // Also find all top-level Communities and disseminate
                        for (Community topCommunity : Community.findAllTop(context))
                        {
                            // Disseminate all top-level communities (recursively!)
                            String childFileName = pkgDirectory + PackageUtils.getPackageName(topCommunity, fileExtension);
                            disseminateAll(context, topCommunity, params, new File(childFileName));
                        }
                        break;
                }//end switch
            }//end if not an Item
        }//end if pkgFile exists

        // Return list of all successfully disseminated packages
        return getPackageList();
    }

    /**
     * Add a File to the list of successfully disseminated package files.
     * Each file is only recorded once.
     *
     * @param f the package File to record
     */
    protected void addToPackageList(File f)
    {
        // Only record each disseminated package once
        if (!packageFileList.contains(f))
        {
            packageFileList.add(f);
        }
    }

    /**
     * Return List of all package Files which have been disseminated by
     * this instance of the Disseminator.
     * <P>
     * This list can be useful in reporting back to the user what content has
     * been disseminated as packages. It's used by disseminateAll() to report
     * what packages were created.
     *
     * @return List of Files which correspond to the disseminated packages
     */
    protected List<File> getPackageList()
    {
        return packageFileList;
    }
}

View File

@@ -1,380 +1,380 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
/**
 * An abstract implementation of a DSpace Package Ingester, which
 * implements a few helper/utility methods that most (all?) PackageIngesters
 * may find useful.
 * <P>
 * First, implements recursive functionality in the ingestAll() and replaceAll()
 * methods of the PackageIngester interface. These methods are setup to
 * recursively call ingest() and replace() respectively.
 * <P>
 * Finally, it also implements several utility methods (createDSpaceObject(),
 * finishCreateItem(), updateDSpaceObject()) which subclasses may find useful.
 * This class allows subclasses to easily create/update objects without
 * having to worry too much about normal DSpace submission workflows (which is
 * taken care of in these utility methods).
 * <P>
 * All Package ingesters should either extend this abstract class
 * or implement <code>PackageIngester</code> to better suit their needs.
 *
 * @author Tim Donohue
 * @see PackageIngester
 */
public abstract class AbstractPackageIngester
        implements PackageIngester
{
    /** log4j category */
    private static Logger log = Logger.getLogger(AbstractPackageIngester.class);

    /**
     * References to other packages -- these are the next packages to ingest recursively.
     * Key = DSpace Object just ingested, Value = List of all packages relating to that DSpaceObject.
     **/
    private Map<DSpaceObject,List<String>> packageReferences = new HashMap<DSpaceObject,List<String>>();

    /** List of all successfully ingested/replaced DSpace objects */
    private List<DSpaceObject> dsoIngestedList = new ArrayList<DSpaceObject>();

    /**
     * Recursively create one or more DSpace Objects out of the contents
     * of the ingested package (and all other referenced packages).
     * The initial object is created under the indicated parent. All other
     * objects are created based on their relationship to the initial object.
     * <p>
     * For example, a scenario may be to create a Collection based on a
     * collection-level package, and also create an Item for every item-level
     * package referenced by the collection-level package.
     * <p>
     * The output of this method is one or more newly created <code>DspaceObject</code>s.
     * <p>
     * The packager <em>may</em> choose not to implement <code>ingestAll</code>,
     * or simply forward the call to <code>ingest</code> if it is unable to support
     * recursive ingestion.
     * <p>
     * The deposit license (Only significant for Item) is passed
     * explicitly as a string since there is no place for it in many
     * package formats. It is optional and may be given as
     * <code>null</code>.
     *
     * @param context DSpace context.
     * @param parent parent under which to create the initial object
     *        (may be null -- in which case ingester must determine parent from package
     *        or throw an error).
     * @param pkgFile The initial package file to ingest
     * @param params Properties-style list of options (interpreted by each packager).
     * @param license may be null, which takes default license.
     * @return List of DSpaceObjects created
     *
     * @throws PackageValidationException if initial package (or any referenced package)
     *         is unacceptable or there is a fatal error in creating a DSpaceObject
     * @throws UnsupportedOperationException if this packager does not
     *         implement <code>ingestAll</code>
     */
    @Override
    public List<DSpaceObject> ingestAll(Context context, DSpaceObject parent, File pkgFile,
                                PackageParameters params, String license)
        throws PackageException, UnsupportedOperationException,
               CrosswalkException, AuthorizeException,
               SQLException, IOException
    {
        //If unset, make sure the Parameters specifies this is a recursive ingest
        if(!params.recursiveModeEnabled())
        {
            params.setRecursiveModeEnabled(true);
        }

        //Initial DSpace Object to ingest
        DSpaceObject dso = null;

        //try to ingest the first package
        try
        {
            //actually ingest pkg using provided PackageIngester
            dso = ingest(context, parent, pkgFile, params, license);
        }
        catch(IllegalStateException ie)
        {
            // NOTE: if we encounter an IllegalStateException, this means the
            // handle is already in use and this object already exists.

            //if we are skipping over (i.e. keeping) existing objects
            if(params.keepExistingModeEnabled())
            {
                log.warn(LogManager.getHeader(context, "skip_package_ingest", "Object already exists, package-skipped=" + pkgFile));
            }
            else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
            {
                throw ie;
            }
        }

        //as long as our first object was ingested successfully
        if(dso!=null)
        {
            //add to list of successfully ingested objects
            addToIngestedList(dso);

            //We can only recursively ingest non-Items
            //(NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
            if(dso.getType()!=Constants.ITEM)
            {
                //Check if we found child package references when ingesting this latest DSpaceObject
                List<String> childPkgRefs = getPackageReferences(dso);

                //we can only recursively ingest child packages
                //if we have references to them
                if(childPkgRefs!=null && !childPkgRefs.isEmpty())
                {
                    //Recursively ingest each child package, using this current object as the parent DSpace Object
                    for(String childPkgRef : childPkgRefs)
                    {
                        // Remember where the additions start, so that any newly
                        // ingested child can be located in dsoIngestedList below
                        int oldSize = dsoIngestedList.size();

                        //Assume package reference is relative to current package location
                        File childPkg = new File(pkgFile.getAbsoluteFile().getParent(), childPkgRef);

                        //fun, it's recursive! -- ingest referenced package as a child of current object
                        ingestAll(context, dso, childPkg, params, license);

                        // A Collection can map to Items that it does not "own".
                        // If a Collection package has an Item as a child, it
                        // should be mapped regardless of ownership.
                        // Note: Only perform this mapping if new items were ingested to this collection
                        if (Constants.COLLECTION == dso.getType() && dsoIngestedList.size()>oldSize)
                        {
                            // Since running 'ingestAll' on an item will only ingest one Item at most,
                            // just make sure that item is mapped to this collection.
                            Item childItem = (Item)dsoIngestedList.get(oldSize);
                            Collection collection = (Collection)dso;
                            if (!childItem.isIn(collection))
                            {
                                collection.addItem(childItem);
                            }
                        }
                    }
                }//end if child pkgs
            }//end if not an Item
        }//end if DSpaceObject not null

        //Return list of all objects ingested
        return getIngestedList();
    }

    /**
     * Recursively replace one or more DSpace Objects out of the contents
     * of the ingested package (and all other referenced packages).
     * The initial object to replace is indicated by <code>dso</code>. All other
     * objects are replaced based on information provided in the referenced packages.
     * <p>
     * For example, a scenario may be to replace a Collection based on a
     * collection-level package, and also replace *every* Item in that collection
     * based on the item-level packages referenced by the collection-level package.
     * <p>
     * Please note that since the <code>dso</code> input only specifies the
     * initial object to replace, any additional objects to replace must be
     * determined based on the referenced packages (or initial package itself).
     * <p>
     * The output of this method is one or more replaced <code>DspaceObject</code>s.
     * <p>
     * The packager <em>may</em> choose not to implement <code>replaceAll</code>,
     * since it somewhat contradicts the archival nature of DSpace. It also
     * may choose to forward the call to <code>replace</code> if it is unable to
     * support recursive replacement.
     *
     * @param context DSpace context.
     * @param dso initial existing DSpace Object to be replaced, may be null
     *        if object to replace can be determined from package
     * @param pkgFile The package file to ingest.
     * @param params Properties-style list of options specific to this packager
     * @return List of DSpaceObjects replaced
     *
     * @throws PackageValidationException if initial package (or any referenced package)
     *         is unacceptable or there is a fatal error in creating a DSpaceObject
     * @throws UnsupportedOperationException if this packager does not
     *         implement <code>replaceAll</code>
     */
    @Override
    public List<DSpaceObject> replaceAll(Context context, DSpaceObject dso,
                                File pkgFile, PackageParameters params)
        throws PackageException, UnsupportedOperationException,
               CrosswalkException, AuthorizeException,
               SQLException, IOException
    {
        //If unset, make sure the Parameters specifies this is a recursive replace
        if(!params.recursiveModeEnabled())
        {
            params.setRecursiveModeEnabled(true);
        }

        //actually ingest pkg using provided PackageIngester, and replace object
        //NOTE: 'dso' may be null!  If it is null, the PackageIngester must determine
        //      the object to be replaced from the package itself.
        DSpaceObject replacedDso = replace(context, dso, pkgFile, params);

        //as long as our object was replaced successfully
        if(replacedDso!=null)
        {
            //add to list of successfully replaced objects
            addToIngestedList(replacedDso);

            //We can only recursively replace non-Items
            //(NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
            if(replacedDso.getType()!=Constants.ITEM)
            {
                //Check if we found child package references when replacing this latest DSpaceObject
                List<String> childPkgRefs = getPackageReferences(replacedDso);

                //we can only recursively ingest child packages
                //if we have references to them
                if(childPkgRefs!=null && !childPkgRefs.isEmpty())
                {
                    //Recursively replace each child package
                    for(String childPkgRef : childPkgRefs)
                    {
                        // Remember where the additions start, so that any newly
                        // ingested child can be located in dsoIngestedList below
                        int oldSize = dsoIngestedList.size();

                        //Assume package reference is relative to current package location
                        File childPkg = new File(pkgFile.getAbsoluteFile().getParent(), childPkgRef);

                        //fun, it's recursive! -- replace referenced package as a child of current object
                        // Pass object to replace as 'null', as we don't know which object to replace.
                        replaceAll(context, null, childPkg, params);

                        // A Collection can map to Items that it does not "own".
                        // If a Collection package has an Item as a child, it
                        // should be mapped regardless of ownership.
                        // Note: Only perform this mapping if new items were ingested to this collection
                        if (Constants.COLLECTION == replacedDso.getType() && dsoIngestedList.size()>oldSize)
                        {
                            // Since running 'replaceAll' on an item will only ingest one Item at most,
                            // just make sure that item is mapped to this collection.
                            Item childItem = (Item)dsoIngestedList.get(oldSize);
                            Collection collection = (Collection)replacedDso;
                            if (!childItem.isIn(collection))
                            {
                                collection.addItem(childItem);
                            }
                        }
                    }
                }//end if child pkgs
            }//end if not an Item
        }//end if DSpaceObject not null

        //Return list of all objects replaced
        return getIngestedList();
    }

    /**
     * During ingestion process, some submission information packages (SIPs)
     * may reference other packages to be ingested (recursively).
     * <P>
     * This method collects all references to other packages, so that we
     * can choose to recursively ingest them, as necessary, alongside the
     * DSpaceObject created from the original SIP.
     * <P>
     * References are collected based on the DSpaceObject created from the SIP
     * (this way we keep the context of these references).
     *
     * @param dso DSpaceObject whose SIP referenced another package
     * @param packageRef A reference to another package, which can be ingested after this one
     */
    public void addPackageReference(DSpaceObject dso, String packageRef)
    {
        List<String> packageRefValues = null;

        // Check if we already have an entry for packages referenced by this object
        if(packageReferences.containsKey(dso))
        {
            packageRefValues = packageReferences.get(dso);
        }
        else
        {
            //Create a new empty list of references
            packageRefValues = new ArrayList<String>();
        }

        //add this package reference to existing list and save
        packageRefValues.add(packageRef);
        packageReferences.put(dso, packageRefValues);
    }

    /**
     * Return a list of known SIP references from a newly created DSpaceObject.
     * <P>
     * These references should detail where another package exists which
     * should be ingested alongside the current DSpaceObject.
     * <P>
     * The <code>AbstractPackageIngester</code> or an equivalent SIP handler is expected
     * to understand how to deal with these package references.
     *
     * @param dso DSpaceObject whose SIP referenced other SIPs
     * @return List of Strings which are the references to external submission ingestion packages
     *         (may be null if no SIPs were referenced)
     */
    public List<String> getPackageReferences(DSpaceObject dso)
    {
        return packageReferences.get(dso);
    }

    /**
     * Add DSpaceObject to list of successfully ingested/replaced objects.
     * Each object is only recorded once.
     *
     * @param dso DSpaceObject
     */
    protected void addToIngestedList(DSpaceObject dso)
    {
        //add to list of successfully ingested objects
        if(!dsoIngestedList.contains(dso))
        {
            dsoIngestedList.add(dso);
        }
    }

    /**
     * Return List of all DSpaceObjects which have been ingested/replaced by
     * this instance of the Ingester.
     * <P>
     * This list can be useful in reporting back to the user what content has
     * been added or replaced. It's used by ingestAll() and replaceAll() to
     * return this list of everything that was ingested/replaced.
     *
     * @return List of DSpaceObjects which have been added/replaced
     */
    protected List<DSpaceObject> getIngestedList()
    {
        return dsoIngestedList;
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
/**
* An abstract implementation of a DSpace Package Ingester, which
* implements a few helper/utility methods that most (all?) PackageIngesters
* may find useful.
* <P>
* First, implements recursive functionality in ingestAll() and replaceAll()
* methods of the PackageIngester interface. These methods are setup to
* recursively call ingest() and replace() respectively.
* <P>
* Finally, it also implements several utility methods (createDSpaceObject(),
* finishCreateItem(), updateDSpaceObject()) which subclasses may find useful.
* This classes will allow subclasses to easily create/update objects without
* having to worry too much about normal DSpace submission workflows (which is
* taken care of in these utility methods).
* <P>
* All Package ingesters should either extend this abstract class
* or implement <code>PackageIngester</code> to better suit their needs.
*
* @author Tim Donohue
* @see PackageIngester
*/
public abstract class AbstractPackageIngester
implements PackageIngester
{
/** log4j category */
private static Logger log = Logger.getLogger(AbstractPackageIngester.class);
/**
* References to other packages -- these are the next packages to ingest recursively
* Key = DSpace Object just ingested, Value = List of all packages relating to a DSpaceObject
**/
private Map<DSpaceObject,List<String>> packageReferences = new HashMap<DSpaceObject,List<String>>();
/** List of all successfully ingested/replaced DSpace objects */
private List<DSpaceObject> dsoIngestedList = new ArrayList<DSpaceObject>();
/**
* Recursively create one or more DSpace Objects out of the contents
* of the ingested package (and all other referenced packages).
* The initial object is created under the indicated parent. All other
* objects are created based on their relationship to the initial object.
* <p>
* For example, a scenario may be to create a Collection based on a
* collection-level package, and also create an Item for every item-level
* package referenced by the collection-level package.
* <p>
* The output of this method is one or more newly created <code>DspaceObject<code>s.
* <p>
* The packager <em>may</em> choose not to implement <code>ingestAll</code>,
* or simply forward the call to <code>ingest</code> if it is unable to support
* recursive ingestion.
* <p>
* The deposit license (Only significant for Item) is passed
* explicitly as a string since there is no place for it in many
* package formats. It is optional and may be given as
* <code>null</code>.
*
* @param context DSpace context.
* @param parent parent under which to create the initial object
* (may be null -- in which case ingester must determine parent from package
* or throw an error).
* @param pkgFile The initial package file to ingest
* @param params Properties-style list of options (interpreted by each packager).
* @param license may be null, which takes default license.
* @return List of DSpaceObjects created
*
* @throws PackageValidationException if initial package (or any referenced package)
* is unacceptable or there is a fatal error in creating a DSpaceObject
* @throws UnsupportedOperationException if this packager does not
* implement <code>ingestAll</code>
*/
@Override
public List<DSpaceObject> ingestAll(Context context, DSpaceObject parent, File pkgFile,
PackageParameters params, String license)
throws PackageException, UnsupportedOperationException,
CrosswalkException, AuthorizeException,
SQLException, IOException
{
//If unset, make sure the Parameters specifies this is a recursive ingest
if(!params.recursiveModeEnabled())
{
params.setRecursiveModeEnabled(true);
}
//Initial DSpace Object to ingest
DSpaceObject dso = null;
//try to ingest the first package
try
{
//actually ingest pkg using provided PackageIngester
dso = ingest(context, parent, pkgFile, params, license);
}
catch(IllegalStateException ie)
{
// NOTE: if we encounter an IllegalStateException, this means the
// handle is already in use and this object already exists.
//if we are skipping over (i.e. keeping) existing objects
if(params.keepExistingModeEnabled())
{
log.warn(LogManager.getHeader(context, "skip_package_ingest", "Object already exists, package-skipped=" + pkgFile));
}
else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
{
throw ie;
}
}
//as long as our first object was ingested successfully
if(dso!=null)
{
//add to list of successfully ingested objects
addToIngestedList(dso);
//We can only recursively ingest non-Items
//(NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
if(dso.getType()!=Constants.ITEM)
{
//Check if we found child package references when ingesting this latest DSpaceObject
List<String> childPkgRefs = getPackageReferences(dso);
//we can only recursively ingest child packages
//if we have references to them
if(childPkgRefs!=null && !childPkgRefs.isEmpty())
{
//Recursively ingest each child package, using this current object as the parent DSpace Object
for(String childPkgRef : childPkgRefs)
{
// Remember where the additions start
int oldSize = dsoIngestedList.size();
//Assume package reference is relative to current package location
File childPkg = new File(pkgFile.getAbsoluteFile().getParent(), childPkgRef);
//fun, it's recursive! -- ingested referenced package as a child of current object
ingestAll(context, dso, childPkg, params, license);
// A Collection can map to Items that it does not "own".
// If a Collection package has an Item as a child, it
// should be mapped regardless of ownership.
// Note: Only perform this mapping if new items were ingested to this collection
if (Constants.COLLECTION == dso.getType() && dsoIngestedList.size()>oldSize)
{
// Since running 'ingestAll' on an item, will only ingest one Item at most,
// Just make sure that item is mapped to this collection.
Item childItem = (Item)dsoIngestedList.get(oldSize);
Collection collection = (Collection)dso;
if (!childItem.isIn(collection))
{
collection.addItem(childItem);
}
}
}
}//end if child pkgs
}//end if not an Item
}//end if DSpaceObject not null
//Return list of all objects ingested
return getIngestedList();
}
/**
* Recursively replace one or more DSpace Objects out of the contents
* of the ingested package (and all other referenced packages).
* The initial object to replace is indicated by <code>dso</code>. All other
* objects are replaced based on information provided in the referenced packages.
* <p>
* For example, a scenario may be to replace a Collection based on a
* collection-level package, and also replace *every* Item in that collection
* based on the item-level packages referenced by the collection-level package.
* <p>
* Please note that since the <code>dso</code> input only specifies the
* initial object to replace, any additional objects to replace must be
* determined based on the referenced packages (or initial package itself).
* <p>
* The output of this method is one or more replaced <code>DspaceObject<code>s.
* <p>
* The packager <em>may</em> choose not to implement <code>replaceAll</code>,
* since it somewhat contradicts the archival nature of DSpace. It also
* may choose to forward the call to <code>replace</code> if it is unable to
* support recursive replacement.
*
* @param context DSpace context.
* @param dso initial existing DSpace Object to be replaced, may be null
* if object to replace can be determined from package
* @param pkgFile The package file to ingest.
* @param params Properties-style list of options specific to this packager
* @return List of DSpaceObjects replaced
*
* @throws PackageValidationException if initial package (or any referenced package)
* is unacceptable or there is a fatal error in creating a DSpaceObject
* @throws UnsupportedOperationException if this packager does not
* implement <code>replaceAll</code>
*/
@Override
public List<DSpaceObject> replaceAll(Context context, DSpaceObject dso,
File pkgFile, PackageParameters params)
throws PackageException, UnsupportedOperationException,
CrosswalkException, AuthorizeException,
SQLException, IOException
{
//If unset, make sure the Parameters specifies this is a recursive replace
if(!params.recursiveModeEnabled())
{
params.setRecursiveModeEnabled(true);
}
//actually ingest pkg using provided PackageIngester, and replace object
//NOTE: 'dso' may be null! If it is null, the PackageIngester must determine
// the object to be replaced from the package itself.
DSpaceObject replacedDso = replace(context, dso, pkgFile, params);
//as long as our object was replaced successfully
if(replacedDso!=null)
{
//add to list of successfully replaced objects
addToIngestedList(replacedDso);
//We can only recursively replace non-Items
//(NOTE: Items have no children, as Bitstreams/Bundles are created from Item packages)
if(replacedDso.getType()!=Constants.ITEM)
{
//Check if we found child package references when replacing this latest DSpaceObject
List<String> childPkgRefs = getPackageReferences(replacedDso);
//we can only recursively ingest child packages
//if we have references to them
if(childPkgRefs!=null && !childPkgRefs.isEmpty())
{
//Recursively replace each child package
for(String childPkgRef : childPkgRefs)
{
// Remember where the additions start
int oldSize = dsoIngestedList.size();
//Assume package reference is relative to current package location
File childPkg = new File(pkgFile.getAbsoluteFile().getParent(), childPkgRef);
//fun, it's recursive! -- replaced referenced package as a child of current object
// Pass object to replace as 'null', as we don't know which object to replace.
replaceAll(context, null, childPkg, params);
// A Collection can map to Items that it does not "own".
// If a Collection package has an Item as a child, it
// should be mapped regardless of ownership.
// If a Collection package has an Item as a child, it
// should be mapped regardless of ownership.
// Note: Only perform this mapping if new items were ingested to this collection
if (Constants.COLLECTION == replacedDso.getType() && dsoIngestedList.size()>oldSize)
{
// Since running 'replaceAll' on an item, will only ingest one Item at most,
// Just make sure that item is mapped to this collection.
Item childItem = (Item)dsoIngestedList.get(oldSize);
Collection collection = (Collection)replacedDso;
if (!childItem.isIn(collection))
{
collection.addItem(childItem);
}
}
}
}//end if child pkgs
}//end if not an Item
}//end if DSpaceObject not null
//Return list of all objects replaced
return getIngestedList();
}
/**
* During ingestion process, some submission information packages (SIPs)
* may reference other packages to be ingested (recursively).
* <P>
* This method collects all references to other packages, so that we
* can choose to recursively ingest them, as necessary, alongside the
* DSpaceObject created from the original SIP.
* <P>
* References are collected based on the DSpaceObject created from the SIP
* (this way we keep the context of these references).
*
* @param dso DSpaceObject whose SIP referenced another package
* @param packageRef A reference to another package, which can be ingested after this one
*/
public void addPackageReference(DSpaceObject dso, String packageRef)
{
List<String> packageRefValues = null;
// Check if we already have an entry for packages reference by this object
if(packageReferences.containsKey(dso))
{
packageRefValues = packageReferences.get(dso);
}
else
{
//Create a new empty list of references
packageRefValues = new ArrayList<String>();
}
//add this package reference to existing list and save
packageRefValues.add(packageRef);
packageReferences.put(dso, packageRefValues);
}
/**
* Return a list of known SIP references from a newly created DSpaceObject.
* <P>
* These references should detail where another package exists which
* should be ingested alongside the current DSpaceObject.
* <P>
* The <code>AbstractPackageIngester</code> or an equivalent SIP handler is expected
* to understand how to deal with these package references.
*
* @param dso DSpaceObject whose SIP referenced other SIPs
* @return List of Strings which are the references to external submission ingestion packages
* (may be null if no SIPs were referenced)
*/
public List<String> getPackageReferences(DSpaceObject dso)
{
return packageReferences.get(dso);
}
/**
* Add DSpaceObject to list of successfully ingested/replaced objects
* @param dso DSpaceObject
*/
protected void addToIngestedList(DSpaceObject dso)
{
//add to list of successfully ingested objects
if(!dsoIngestedList.contains(dso))
{
dsoIngestedList.add(dso);
}
}
/**
* Return List of all DSpaceObjects which have been ingested/replaced by
* this instance of the Ingester.
* <P>
* This list can be useful in reporting back to the user what content has
* been added or replaced. It's used by ingestAll() and replaceAll() to
* return this list of everything that was ingested/replaced.
*
* @return List of DSpaceObjects which have been added/replaced
*/
protected List<DSpaceObject> getIngestedList()
{
return dsoIngestedList;
}
}

View File

@@ -1,406 +1,406 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.IOException;
import java.sql.SQLException;
import org.jdom.Element;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.Context;
import org.dspace.core.Constants;
/**
* Subclass of the METS packager framework to ingest a DSpace
* Archival Information Package (AIP). The AIP is intended to be, foremost,
* a _complete_ and _accurate_ representation of one object in the DSpace
* object model. An AIP contains all of the information needed to restore
* the object precisely in another DSpace archive instance.
* <p>
* This ingester recognizes two distinct types of AIPs: "Manifest-Only" and "External".
* The Manifest-Only AIP, which is selected by specifying a PackageParameters
* key "manifestOnly" with the value "true", refers to all its contents by
* reference only. For Community or Collection AIPs this means all references to their
* child objects are just via Handles. For Item AIPs all Bitreams are just
* referenced by their asset store location instead of finding them in the "package".
* The Manifest-Only AIP package format is simply a METS XML document serialized into a file.
* <p>
* An "external" AIP (the default), is a conventional Zip-file based package
* that includes copies of all bitstreams referenced by the object as well
* as a serialized METS XML document in the path "mets.xml".
*
* Configuration keys:
*
* # instructs which xwalk plugin to use for a given type of metadata
* mets.dspaceAIP.ingest.crosswalk.{mdSecName} = {pluginName}
* mets.dspaceAIP.ingest.crosswalk.DC = QDC
* mets.dspaceAIP.ingest.crosswalk.DSpaceDepositLicense = NULLSTREAM
*
* # Option to save METS manifest in the item: (default is false)
* mets.default.ingest.preserveManifest = false
*
* @author Larry Stone
* @author Tim Donohue
* @version $Revision: 1.1 $
*
* @see AbstractMETSIngester
* @see AbstractPackageIngester
* @see PackageIngester
* @see org.dspace.content.packager.METSManifest
*/
public class DSpaceAIPIngester
extends AbstractMETSIngester
{
/** log4j category */
private static Logger log = Logger.getLogger(DSpaceAIPIngester.class);
/**
* Ensure it's an AIP generated by the complementary AIP disseminator.
*/
@Override
void checkManifest(METSManifest manifest)
throws MetadataValidationException
{
String profile = manifest.getProfile();
if (profile == null)
{
throw new MetadataValidationException("Cannot accept METS with no PROFILE attribute!");
}
else if (!profile.equals(DSpaceAIPDisseminator.PROFILE_1_0))
{
throw new MetadataValidationException("METS has unacceptable PROFILE attribute, profile=" + profile);
}
}
/**
* Choose DMD section(s) to crosswalk.
* <p>
* The algorithm is:<br>
* 1. Use whatever the <code>dmd</code> parameter specifies as the primary DMD.<br>
* 2. If (1) is unspecified, find DIM (preferably) or MODS as primary DMD.<br>
* 3. If (1) or (2) succeeds, crosswalk it and ignore all other DMDs with
* same GROUPID<br>
* 4. Crosswalk remaining DMDs not eliminated already.
*/
@Override
public void crosswalkObjectDmd(Context context, DSpaceObject dso,
METSManifest manifest,
MdrefManager callback,
Element dmds[], PackageParameters params)
throws CrosswalkException, PackageValidationException,
AuthorizeException, SQLException, IOException
{
int found = -1;
// Check to see what dmdSec the user specified in the 'dmd' parameter
String userDmd = null;
if (params != null)
{
userDmd = params.getProperty("dmd");
}
if (userDmd != null && userDmd.length() > 0)
{
for (int i = 0; i < dmds.length; ++i)
{
if (userDmd.equalsIgnoreCase(manifest.getMdType(dmds[i])))
{
found = i;
}
}
}
// DIM is preferred, if nothing specified by user
if (found == -1)
{
// DIM is preferred for AIP
for (int i = 0; i < dmds.length; ++i)
{
//NOTE: METS standard actually says this should be DIM (all uppercase). But,
// just in case, we're going to be a bit more forgiving.
if ("DIM".equalsIgnoreCase(manifest.getMdType(dmds[i])))
{
found = i;
}
}
}
// MODS is acceptable otehrwise..
if (found == -1)
{
for (int i = 0; i < dmds.length; ++i)
{
//NOTE: METS standard actually says this should be MODS (all uppercase). But,
// just in case, we're going to be a bit more forgiving.
if ("MODS".equalsIgnoreCase(manifest.getMdType(dmds[i])))
{
found = i;
}
}
}
String groupID = null;
if (found >= 0)
{
manifest.crosswalkItemDmd(context, params, dso, dmds[found], callback);
groupID = dmds[found].getAttributeValue("GROUPID");
if (groupID != null)
{
for (int i = 0; i < dmds.length; ++i)
{
String g = dmds[i].getAttributeValue("GROUPID");
if (g != null && !g.equals(groupID))
{
manifest.crosswalkItemDmd(context, params, dso, dmds[i], callback);
}
}
}
}
// otherwise take the first. Don't xwalk more than one because
// each xwalk _adds_ metadata, and could add duplicate fields.
else if (dmds.length > 0)
{
manifest.crosswalkItemDmd(context, params, dso, dmds[0], callback);
}
// it's an error if there is nothing to crosswalk:
else
{
throw new MetadataValidationException("DSpaceAIPIngester: Could not find an acceptable object-wide DMD section in manifest.");
}
}
/**
* Ignore license when restoring an manifest-only AIP, since it should
* be a bitstream in the AIP already.
* Otherwise: Check item for license first; then, take deposit
* license supplied by explicit argument next, else use collection's
* default deposit license.
* Normally the rightsMD crosswalks should provide a license.
*/
@Override
public void addLicense(Context context, Item item, String license,
Collection collection, PackageParameters params)
throws PackageValidationException,
AuthorizeException, SQLException, IOException
{
boolean newLicense = false;
if(!params.restoreModeEnabled())
{
//AIP is not being restored/replaced, so treat it like a SIP -- every new SIP needs a new license
newLicense = true;
}
// Add deposit license if there isn't one in the object,
// and it's not a restoration of an "manifestOnly" AIP:
if (!params.getBooleanProperty("manifestOnly", false) &&
PackageUtils.findDepositLicense(context, item) == null)
{
newLicense = true;
}
if(newLicense)
{
PackageUtils.addDepositLicense(context, license, item, collection);
}
}
/**
* Last change to fix up a DSpace Object.
* <P>
* For AIPs, if the object is an Item, we may want to make sure all of its
* metadata fields already exist in the database (otherwise, the database
* will throw errors when we attempt to save/update the Item)
*
* @param context DSpace Context
* @param dso DSpace object
* @param params Packager Parameters
*/
@Override
public void finishObject(Context context, DSpaceObject dso, PackageParameters params)
throws PackageValidationException, CrosswalkException,
AuthorizeException, SQLException, IOException
{
if(dso.getType()==Constants.ITEM)
{
// Check if 'createMetadataFields' option is enabled (default=true)
// This defaults to true as by default we should attempt to restore as much metadata as we can.
// When 'createMetadataFields' is set to false, an ingest will fail if it attempts to ingest content to a missing metadata field.
if (params.getBooleanProperty("createMetadataFields", true))
{
// We want to verify that all the Metadata Fields we've crosswalked
// actually *exist* in the DB. If not, we'll try to create them
createMissingMetadataFields(context, (Item) dso);
}
}
}
/**
* Nothing extra to do to bitstream after ingestion.
*/
@Override
public void finishBitstream(Context context,
Bitstream bs,
Element mfile,
METSManifest manifest,
PackageParameters params)
throws MetadataValidationException, SQLException, AuthorizeException, IOException
{
// nothing to do.
}
/**
* Return the type of DSpaceObject in this package; it is
* in the TYPE attribute of the mets:mets element.
*/
@Override
public int getObjectType(METSManifest manifest)
throws PackageValidationException
{
Element mets = manifest.getMets();
String typeStr = mets.getAttributeValue("TYPE");
if (typeStr == null || typeStr.length() == 0)
{
throw new PackageValidationException("Manifest is missing the required mets@TYPE attribute.");
}
if (typeStr.startsWith("DSpace "))
{
typeStr = typeStr.substring(7);
}
int type = Constants.getTypeID(typeStr);
if (type < 0)
{
throw new PackageValidationException("Manifest has unrecognized value in mets@TYPE attribute: " + typeStr);
}
return type;
}
/**
* Name used to distinguish DSpace Configuration entries for this subclass.
*/
@Override
public String getConfigurationName()
{
return "dspaceAIP";
}
/**
* Verifies that all the unsaved, crosswalked metadata fields that have
* been added to an Item actually exist in our Database. If they don't
* exist, they are created within the proper database tables.
* <P>
* This method must be called *before* item.update(), as the call to update()
* will throw a SQLException when attempting to save any fields which
* don't already exist in the database.
* <P>
* NOTE: This will NOT create a missing Metadata Schema (e.g. "dc" schema),
* as we do not have enough info to create schemas on the fly.
*
* @param context - DSpace Context
* @param item - Item whose unsaved metadata fields we are testing
* @throws AuthorizeException if a metadata field doesn't exist and current user is not authorized to create it (i.e. not an Admin)
* @throws PackageValidationException if a metadata schema doesn't exist, as we cannot autocreate a schema
*/
protected static void createMissingMetadataFields(Context context, Item item)
throws PackageValidationException, AuthorizeException, IOException, SQLException
{
// Get all metadata fields/values currently added to this Item
DCValue allMD[] = item.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
// For each field, we'll check if it exists. If not, we'll create it.
for(DCValue md : allMD)
{
MetadataSchema mdSchema = null;
MetadataField mdField = null;
try
{
//Try to access this Schema
mdSchema = MetadataSchema.find(context, md.schema);
//If Schema found, try to locate field from database
if(mdSchema!=null)
{
mdField = MetadataField.findByElement(context, mdSchema.getSchemaID(), md.element, md.qualifier);
}
}
catch(SQLException se)
{
//If a SQLException error is thrown, then this field does NOT exist in DB
//Set field to null, so we know we need to create it
mdField = null;
}
// If our Schema was not found, we have a problem
// We cannot easily create a Schema automatically -- as we don't know its Namespace
if(mdSchema==null)
{
throw new PackageValidationException("Unknown Metadata Schema encountered (" + md.schema + ") when attempting to ingest an Item. You will need to create this Metadata Schema in DSpace Schema Registry before the Item can be ingested.");
}
// If our Metadata Field is null, we will attempt to create it in the proper Schema
if(mdField==null)
{
try
{
//initialize field (but don't set a scope note) & create it
mdField = new MetadataField(mdSchema, md.element, md.qualifier, null);
// NOTE: Only Adminstrators can create Metadata Fields -- create() will throw an AuthorizationException for non-Admins
mdField.create(context);
//log that field was created
log.info("Located a missing metadata field (schema:'" + mdSchema.getName() +"', element:'"+ md.element +"', qualifier:'"+ md.qualifier +"') while ingesting Item. This missing field has been created in the DSpace Metadata Field Registry.");
}
catch(NonUniqueMetadataException ne)
{ // This exception should never happen, as we already checked to make sure the field doesn't exist.
// But, we'll catch it anyways so that the Java compiler doesn't get upset
throw new SQLException("Unable to create Metadata Field (element='" + md.element + "', qualifier='" + md.qualifier + "') in Schema "+ mdSchema.getName() +".", ne);
}
}
}
}
/**
* Returns a user help string which should describe the
* additional valid command-line options that this packager
* implementation will accept when using the <code>-o</code> or
* <code>--option</code> flags with the Packager script.
*
* @return a string describing additional command-line options available
* with this packager
*/
@Override
public String getParameterHelp()
{
String parentHelp = super.getParameterHelp();
//Return superclass help info, plus the extra parameters/options that this class supports
return parentHelp +
"\n\n" +
"* createMetadataFields=[boolean] " +
"If true, ingest attempts to create any missing metadata fields." +
"If false, ingest will fail if a metadata field is encountered which doesn't already exist. (default = true)" +
"\n\n" +
"* dmd=[dmdSecType] " +
"Type of the METS <dmdSec> which should be used to restore item metadata (defaults to DIM, then MODS)";
}
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.IOException;
import java.sql.SQLException;
import org.jdom.Element;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.Context;
import org.dspace.core.Constants;
/**
* Subclass of the METS packager framework to ingest a DSpace
* Archival Information Package (AIP). The AIP is intended to be, foremost,
* a _complete_ and _accurate_ representation of one object in the DSpace
* object model. An AIP contains all of the information needed to restore
* the object precisely in another DSpace archive instance.
* <p>
* This ingester recognizes two distinct types of AIPs: "Manifest-Only" and "External".
* The Manifest-Only AIP, which is selected by specifying a PackageParameters
* key "manifestOnly" with the value "true", refers to all its contents by
* reference only. For Community or Collection AIPs this means all references to their
 * child objects are just via Handles. For Item AIPs all Bitstreams are just
* referenced by their asset store location instead of finding them in the "package".
* The Manifest-Only AIP package format is simply a METS XML document serialized into a file.
* <p>
* An "external" AIP (the default), is a conventional Zip-file based package
* that includes copies of all bitstreams referenced by the object as well
* as a serialized METS XML document in the path "mets.xml".
*
* Configuration keys:
*
* # instructs which xwalk plugin to use for a given type of metadata
* mets.dspaceAIP.ingest.crosswalk.{mdSecName} = {pluginName}
* mets.dspaceAIP.ingest.crosswalk.DC = QDC
* mets.dspaceAIP.ingest.crosswalk.DSpaceDepositLicense = NULLSTREAM
*
* # Option to save METS manifest in the item: (default is false)
* mets.default.ingest.preserveManifest = false
*
* @author Larry Stone
* @author Tim Donohue
* @version $Revision: 1.1 $
*
* @see AbstractMETSIngester
* @see AbstractPackageIngester
* @see PackageIngester
* @see org.dspace.content.packager.METSManifest
*/
public class DSpaceAIPIngester
extends AbstractMETSIngester
{
/** log4j category */
private static Logger log = Logger.getLogger(DSpaceAIPIngester.class);
/**
* Ensure it's an AIP generated by the complementary AIP disseminator.
*/
@Override
void checkManifest(METSManifest manifest)
throws MetadataValidationException
{
String profile = manifest.getProfile();
if (profile == null)
{
throw new MetadataValidationException("Cannot accept METS with no PROFILE attribute!");
}
else if (!profile.equals(DSpaceAIPDisseminator.PROFILE_1_0))
{
throw new MetadataValidationException("METS has unacceptable PROFILE attribute, profile=" + profile);
}
}
/**
* Choose DMD section(s) to crosswalk.
* <p>
* The algorithm is:<br>
* 1. Use whatever the <code>dmd</code> parameter specifies as the primary DMD.<br>
* 2. If (1) is unspecified, find DIM (preferably) or MODS as primary DMD.<br>
* 3. If (1) or (2) succeeds, crosswalk it and ignore all other DMDs with
* same GROUPID<br>
* 4. Crosswalk remaining DMDs not eliminated already.
*/
@Override
public void crosswalkObjectDmd(Context context, DSpaceObject dso,
METSManifest manifest,
MdrefManager callback,
Element dmds[], PackageParameters params)
throws CrosswalkException, PackageValidationException,
AuthorizeException, SQLException, IOException
{
int found = -1;
// Check to see what dmdSec the user specified in the 'dmd' parameter
String userDmd = null;
if (params != null)
{
userDmd = params.getProperty("dmd");
}
if (userDmd != null && userDmd.length() > 0)
{
for (int i = 0; i < dmds.length; ++i)
{
if (userDmd.equalsIgnoreCase(manifest.getMdType(dmds[i])))
{
found = i;
}
}
}
// DIM is preferred, if nothing specified by user
if (found == -1)
{
// DIM is preferred for AIP
for (int i = 0; i < dmds.length; ++i)
{
//NOTE: METS standard actually says this should be DIM (all uppercase). But,
// just in case, we're going to be a bit more forgiving.
if ("DIM".equalsIgnoreCase(manifest.getMdType(dmds[i])))
{
found = i;
}
}
}
// MODS is acceptable otehrwise..
if (found == -1)
{
for (int i = 0; i < dmds.length; ++i)
{
//NOTE: METS standard actually says this should be MODS (all uppercase). But,
// just in case, we're going to be a bit more forgiving.
if ("MODS".equalsIgnoreCase(manifest.getMdType(dmds[i])))
{
found = i;
}
}
}
String groupID = null;
if (found >= 0)
{
manifest.crosswalkItemDmd(context, params, dso, dmds[found], callback);
groupID = dmds[found].getAttributeValue("GROUPID");
if (groupID != null)
{
for (int i = 0; i < dmds.length; ++i)
{
String g = dmds[i].getAttributeValue("GROUPID");
if (g != null && !g.equals(groupID))
{
manifest.crosswalkItemDmd(context, params, dso, dmds[i], callback);
}
}
}
}
// otherwise take the first. Don't xwalk more than one because
// each xwalk _adds_ metadata, and could add duplicate fields.
else if (dmds.length > 0)
{
manifest.crosswalkItemDmd(context, params, dso, dmds[0], callback);
}
// it's an error if there is nothing to crosswalk:
else
{
throw new MetadataValidationException("DSpaceAIPIngester: Could not find an acceptable object-wide DMD section in manifest.");
}
}
/**
* Ignore license when restoring an manifest-only AIP, since it should
* be a bitstream in the AIP already.
* Otherwise: Check item for license first; then, take deposit
* license supplied by explicit argument next, else use collection's
* default deposit license.
* Normally the rightsMD crosswalks should provide a license.
*/
@Override
public void addLicense(Context context, Item item, String license,
Collection collection, PackageParameters params)
throws PackageValidationException,
AuthorizeException, SQLException, IOException
{
boolean newLicense = false;
if(!params.restoreModeEnabled())
{
//AIP is not being restored/replaced, so treat it like a SIP -- every new SIP needs a new license
newLicense = true;
}
// Add deposit license if there isn't one in the object,
// and it's not a restoration of an "manifestOnly" AIP:
if (!params.getBooleanProperty("manifestOnly", false) &&
PackageUtils.findDepositLicense(context, item) == null)
{
newLicense = true;
}
if(newLicense)
{
PackageUtils.addDepositLicense(context, license, item, collection);
}
}
/**
* Last change to fix up a DSpace Object.
* <P>
* For AIPs, if the object is an Item, we may want to make sure all of its
* metadata fields already exist in the database (otherwise, the database
* will throw errors when we attempt to save/update the Item)
*
* @param context DSpace Context
* @param dso DSpace object
* @param params Packager Parameters
*/
@Override
public void finishObject(Context context, DSpaceObject dso, PackageParameters params)
throws PackageValidationException, CrosswalkException,
AuthorizeException, SQLException, IOException
{
if(dso.getType()==Constants.ITEM)
{
// Check if 'createMetadataFields' option is enabled (default=true)
// This defaults to true as by default we should attempt to restore as much metadata as we can.
// When 'createMetadataFields' is set to false, an ingest will fail if it attempts to ingest content to a missing metadata field.
if (params.getBooleanProperty("createMetadataFields", true))
{
// We want to verify that all the Metadata Fields we've crosswalked
// actually *exist* in the DB. If not, we'll try to create them
createMissingMetadataFields(context, (Item) dso);
}
}
}
/**
* Nothing extra to do to bitstream after ingestion.
*/
@Override
public void finishBitstream(Context context,
Bitstream bs,
Element mfile,
METSManifest manifest,
PackageParameters params)
throws MetadataValidationException, SQLException, AuthorizeException, IOException
{
// nothing to do.
}
/**
* Return the type of DSpaceObject in this package; it is
* in the TYPE attribute of the mets:mets element.
*/
@Override
public int getObjectType(METSManifest manifest)
throws PackageValidationException
{
Element mets = manifest.getMets();
String typeStr = mets.getAttributeValue("TYPE");
if (typeStr == null || typeStr.length() == 0)
{
throw new PackageValidationException("Manifest is missing the required mets@TYPE attribute.");
}
if (typeStr.startsWith("DSpace "))
{
typeStr = typeStr.substring(7);
}
int type = Constants.getTypeID(typeStr);
if (type < 0)
{
throw new PackageValidationException("Manifest has unrecognized value in mets@TYPE attribute: " + typeStr);
}
return type;
}
/**
* Name used to distinguish DSpace Configuration entries for this subclass.
*/
@Override
public String getConfigurationName()
{
return "dspaceAIP";
}
/**
* Verifies that all the unsaved, crosswalked metadata fields that have
* been added to an Item actually exist in our Database. If they don't
* exist, they are created within the proper database tables.
* <P>
* This method must be called *before* item.update(), as the call to update()
* will throw a SQLException when attempting to save any fields which
* don't already exist in the database.
* <P>
* NOTE: This will NOT create a missing Metadata Schema (e.g. "dc" schema),
* as we do not have enough info to create schemas on the fly.
*
* @param context - DSpace Context
* @param item - Item whose unsaved metadata fields we are testing
* @throws AuthorizeException if a metadata field doesn't exist and current user is not authorized to create it (i.e. not an Admin)
* @throws PackageValidationException if a metadata schema doesn't exist, as we cannot autocreate a schema
*/
protected static void createMissingMetadataFields(Context context, Item item)
    throws PackageValidationException, AuthorizeException, IOException, SQLException
{
    // Get all metadata fields/values currently added to this Item
    DCValue allMD[] = item.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
    // For each field, we'll check if it exists. If not, we'll create it.
    for(DCValue md : allMD)
    {
        MetadataSchema mdSchema = null;
        MetadataField mdField = null;
        try
        {
            //Try to access this Schema
            mdSchema = MetadataSchema.find(context, md.schema);
            //If Schema found, try to locate field from database
            if(mdSchema!=null)
            {
                mdField = MetadataField.findByElement(context, mdSchema.getSchemaID(), md.element, md.qualifier);
            }
        }
        catch(SQLException se)
        {
            //If a SQLException error is thrown, then this field does NOT exist in DB
            //Set field to null, so we know we need to create it
            // NOTE(review): this relies on an exception as a "not found" signal;
            // confirm findByElement actually throws here rather than returning null.
            mdField = null;
        }
        // If our Schema was not found, we have a problem
        // We cannot easily create a Schema automatically -- as we don't know its Namespace
        if(mdSchema==null)
        {
            throw new PackageValidationException("Unknown Metadata Schema encountered (" + md.schema + ") when attempting to ingest an Item. You will need to create this Metadata Schema in DSpace Schema Registry before the Item can be ingested.");
        }
        // If our Metadata Field is null, we will attempt to create it in the proper Schema
        if(mdField==null)
        {
            try
            {
                //initialize field (but don't set a scope note) & create it
                mdField = new MetadataField(mdSchema, md.element, md.qualifier, null);
                // NOTE: Only Adminstrators can create Metadata Fields -- create() will throw an AuthorizationException for non-Admins
                mdField.create(context);
                //log that field was created
                log.info("Located a missing metadata field (schema:'" + mdSchema.getName() +"', element:'"+ md.element +"', qualifier:'"+ md.qualifier +"') while ingesting Item. This missing field has been created in the DSpace Metadata Field Registry.");
            }
            catch(NonUniqueMetadataException ne)
            { // This exception should never happen, as we already checked to make sure the field doesn't exist.
                // But, we'll catch it anyways so that the Java compiler doesn't get upset
                throw new SQLException("Unable to create Metadata Field (element='" + md.element + "', qualifier='" + md.qualifier + "') in Schema "+ mdSchema.getName() +".", ne);
            }
        }
    }
}
/**
 * Returns a user help string which should describe the
 * additional valid command-line options that this packager
 * implementation will accept when using the <code>-o</code> or
 * <code>--option</code> flags with the Packager script.
 *
 * @return a string describing additional command-line options available
 * with this packager
 */
@Override
public String getParameterHelp()
{
    String parentHelp = super.getParameterHelp();
    //Return superclass help info, plus the extra parameters/options that this class supports
    // BUG FIX: the two sentences below were concatenated without a separating
    // space, rendering as "...metadata fields.If false...".
    return parentHelp +
            "\n\n" +
            "* createMetadataFields=[boolean]      " +
            "If true, ingest attempts to create any missing metadata fields. " +
            "If false, ingest will fail if a metadata field is encountered which doesn't already exist. (default = true)" +
            "\n\n" +
            "* dmd=[dmdSecType]      " +
            "Type of the METS <dmdSec> which should be used to restore item metadata (defaults to DIM, then MODS)";
}
}

View File

@@ -1,363 +1,363 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.ctask.general;
// above package assignment temporary, pending better asynch release process
// package org.dspace.ctask.integrity;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.curate.AbstractCurationTask;
import org.dspace.curate.Curator;
import org.dspace.curate.Suspendable;
/** ClamScan.java
 *
 * A curation task that scans the bitstreams of an Item's ORIGINAL bundle
 * for viruses by streaming them to a ClamAV daemon (clamd) over a TCP
 * socket, using clamd's NUL-terminated IDSESSION/INSTREAM commands.
 *
 * TODO: add a check for the inputstream size limit
 *
 * @author wbossons
 */
@Suspendable(invoked= Curator.Invoked.INTERACTIVE)
public class ClamScan extends AbstractCurationTask
{
    private static final int DEFAULT_CHUNK_SIZE = 4096;//2048

    // clamd protocol commands; the 'z' prefix selects NUL-terminated framing.
    private static final byte[] INSTREAM = "zINSTREAM\0".getBytes();
    // PING and STATS are declared for protocol completeness but are unused here.
    private static final byte[] PING = "zPING\0".getBytes();
    private static final byte[] STATS = "nSTATS\n".getBytes();//prefix with z
    private static final byte[] IDSESSION = "zIDSESSION\0".getBytes();
    private static final byte[] END = "zEND\0".getBytes();

    private static final String PLUGIN_PREFIX = "clamav";
    private static final String INFECTED_MESSAGE = "had virus detected.";
    private static final String CLEAN_MESSAGE = "had no viruses detected.";
    private static final String CONNECT_FAIL_MESSAGE = "Unable to connect to virus service - check setup";
    private static final String SCAN_FAIL_MESSAGE = "Error encountered using virus service - check setup";
    private static final String NEW_ITEM_HANDLE = "in workflow";

    private static Logger log = Logger.getLogger(ClamScan.class);

    // Connection settings, read from the "clamav" module configuration in init().
    private static String host = null;
    private static int port = 0;
    private static int timeout = 0;
    private static boolean failfast = true;

    private int status = Curator.CURATE_UNSET;
    private List<String> results = null;

    private Socket socket = null;
    private DataOutputStream dataOutputStream = null;

    // Reusable chunk buffer for scan I/O. (Stray second ';' removed.)
    // NOTE(review): static and shared across instances, so concurrent scans
    // would interleave data; appears to assume single-threaded curation -- confirm.
    final static byte[] buffer = new byte[DEFAULT_CHUNK_SIZE];

    /**
     * Initialize the task, reading clamd connection settings
     * (service.host, service.port, socket.timeout, scan.failfast)
     * from the "clamav" module configuration.
     */
    @Override
    public void init(Curator curator, String taskId) throws IOException
    {
        super.init(curator, taskId);
        host = ConfigurationManager.getProperty(PLUGIN_PREFIX, "service.host");
        port = ConfigurationManager.getIntProperty(PLUGIN_PREFIX, "service.port");
        timeout = ConfigurationManager.getIntProperty(PLUGIN_PREFIX, "socket.timeout");
        failfast = ConfigurationManager.getBooleanProperty(PLUGIN_PREFIX, "scan.failfast");
    }

    /**
     * Scan every bitstream in the Item's ORIGINAL bundle; non-Item
     * objects are skipped.
     *
     * @param dso the object to curate
     * @return CURATE_SKIP for non-Items; CURATE_SUCCESS or CURATE_FAIL for
     *         scanned Items; CURATE_ERROR if clamd could not be reached or used
     * @throws IOException wrapping any AuthorizeException/SQLException raised
     *         while retrieving bitstreams
     */
    @Override
    public int perform(DSpaceObject dso) throws IOException
    {
        status = Curator.CURATE_SKIP;
        logDebugMessage("The target dso is " + dso.getName());
        if (dso instanceof Item)
        {
            status = Curator.CURATE_SUCCESS;
            Item item = (Item)dso;
            try
            {
                openSession();
            }
            catch (IOException ioE)
            {
                // no point going further - set result and error out
                closeSession();
                setResult(CONNECT_FAIL_MESSAGE);
                return Curator.CURATE_ERROR;
            }
            try
            {
                // NOTE(review): assumes an ORIGINAL bundle exists; an Item
                // without one would throw ArrayIndexOutOfBoundsException -- confirm.
                Bundle bundle = item.getBundles("ORIGINAL")[0];
                results = new ArrayList<String>();
                for (Bitstream bitstream : bundle.getBitstreams())
                {
                    InputStream inputstream = bitstream.retrieve();
                    logDebugMessage("Scanning " + bitstream.getName() + " . . . ");
                    int bstatus = scan(bitstream, inputstream, getItemHandle(item));
                    inputstream.close();
                    if (bstatus == Curator.CURATE_ERROR)
                    {
                        // no point going further - set result and error out
                        setResult(SCAN_FAIL_MESSAGE);
                        status = bstatus;
                        break;
                    }
                    if (failfast && bstatus == Curator.CURATE_FAIL)
                    {
                        // stop at the first infected bitstream
                        status = bstatus;
                        break;
                    }
                    else if (bstatus == Curator.CURATE_FAIL &&
                             status == Curator.CURATE_SUCCESS)
                    {
                        // remember the failure but keep scanning the rest
                        status = bstatus;
                    }
                }
            }
            catch (AuthorizeException authE)
            {
                throw new IOException(authE.getMessage(), authE);
            }
            catch (SQLException sqlE)
            {
                throw new IOException(sqlE.getMessage(), sqlE);
            }
            finally
            {
                closeSession();
            }
            if (status != Curator.CURATE_ERROR)
            {
                formatResults(item);
            }
        }
        return status;
    }

    /** openSession
     *
     * Connect to clamd, set the socket timeout, and start an IDSESSION.
     *
     * @throws IOException if the connection or session setup fails
     */
    private void openSession() throws IOException
    {
        socket = new Socket();
        try
        {
            logDebugMessage("Connecting to " + host + ":" + port);
            socket.connect(new InetSocketAddress(host, port));
        }
        catch (IOException e)
        {
            log.error("Failed to connect to clamd . . .", e);
            throw e;
        }
        try
        {
            socket.setSoTimeout(timeout);
        }
        catch (SocketException e)
        {
            log.error("Could not set socket timeout . . . " + timeout + "ms", e);
            throw new IOException(e);
        }
        try
        {
            dataOutputStream = new DataOutputStream(socket.getOutputStream());
        }
        catch (IOException e)
        {
            log.error("Failed to open OutputStream . . . ", e);
            throw e;
        }
        try
        {
            dataOutputStream.write(IDSESSION);
        }
        catch (IOException e)
        {
            log.error("Error initiating session with IDSESSION command . . . ", e);
            throw e;
        }
    }

    /** closeSession
     *
     * Close the IDSESSION in CLAMD (best-effort: failures are logged,
     * never propagated).
     */
    private void closeSession()
    {
        if (dataOutputStream != null)
        {
            try
            {
                dataOutputStream.write(END);
            }
            catch (IOException e)
            {
                log.error("Exception closing dataOutputStream", e);
            }
        }
        try
        {
            logDebugMessage("Closing the socket for ClamAv daemon . . . ");
            socket.close();
        }
        catch (IOException e)
        {
            log.error("Exception closing socket", e);
        }
    }

    /** scan
     *
     * Issue the INSTREAM command, stream the bitstream contents to clamd
     * in length-prefixed chunks, and read back the daemon's verdict.
     *
     * @param bitstream the bitstream, for reporting results
     * @param inputstream the InputStream to read
     * @param itemHandle the item handle, for reporting results
     * @return CURATE_SUCCESS if clean, CURATE_FAIL if infected,
     *         CURATE_ERROR on any I/O failure
     */
    private int scan(Bitstream bitstream, InputStream inputstream, String itemHandle)
    {
        try
        {
            dataOutputStream.write(INSTREAM);
        }
        catch (IOException e)
        {
            log.error("Error writing INSTREAM command . . .", e);
            return Curator.CURATE_ERROR;
        }
        // BUG FIX: the previous loop condition (read == DEFAULT_CHUNK_SIZE)
        // stopped at the first short read, silently truncating the scan;
        // InputStream.read may return fewer bytes than requested before EOF.
        int read = 0;
        while (read != -1)
        {
            try
            {
                read = inputstream.read(buffer);
            }
            catch (IOException e)
            {
                log.error("Failed attempting to read the InputStream . . . ", e);
                return Curator.CURATE_ERROR;
            }
            if (read == -1)
            {
                break;
            }
            try
            {
                // writeInt emits the 4-byte big-endian length prefix clamd expects
                dataOutputStream.writeInt(read);
                dataOutputStream.write(buffer, 0, read);
            }
            catch (IOException e)
            {
                log.error("Could not write to the socket . . . ", e);
                return Curator.CURATE_ERROR;
            }
        }
        try
        {
            // a zero-length chunk terminates the INSTREAM transfer
            dataOutputStream.writeInt(0);
            dataOutputStream.flush();
        }
        catch (IOException e)
        {
            log.error("Error writing zero-length chunk to socket", e);
            return Curator.CURATE_ERROR;
        }
        try
        {
            // NOTE(review): a single read may return a partial reply on a
            // slow connection -- confirm clamd responses always arrive whole.
            read = socket.getInputStream().read(buffer);
        }
        catch (IOException e)
        {
            log.error("Error reading result from socket", e);
            return Curator.CURATE_ERROR;
        }
        if (read > 0)
        {
            String response = new String(buffer, 0, read);
            logDebugMessage("Response: " + response);
            if (response.indexOf("FOUND") != -1)
            {
                String itemMsg = "item - " + itemHandle + ": ";
                String bsMsg = "bitstream - " + bitstream.getName() +
                               ": SequenceId - " + bitstream.getSequenceID() + ": infected";
                report(itemMsg + bsMsg);
                results.add(bsMsg);
                return Curator.CURATE_FAIL;
            }
            else
            {
                return Curator.CURATE_SUCCESS;
            }
        }
        return Curator.CURATE_ERROR;
    }

    /**
     * Compose the task's result string from the accumulated per-bitstream
     * results and hand it to setResult().
     *
     * @param item the Item that was scanned
     */
    private void formatResults(Item item) throws IOException
    {
        StringBuilder sb = new StringBuilder();
        sb.append("Item: ").append(getItemHandle(item)).append(" ");
        if (status == Curator.CURATE_FAIL)
        {
            sb.append(INFECTED_MESSAGE);
            int count = 0;
            for (String scanresult : results)
            {
                sb.append("\n").append(scanresult).append("\n");
                count++;
            }
            sb.append(count).append(" virus(es) found. ")
              .append(" failfast: ").append(failfast);
        }
        else
        {
            sb.append(CLEAN_MESSAGE);
        }
        setResult(sb.toString());
    }

    /**
     * The item's handle, or the NEW_ITEM_HANDLE placeholder when the item
     * has none yet (i.e. it is still in workflow).
     */
    private static String getItemHandle(Item item)
    {
        String handle = item.getHandle();
        return (handle != null) ? handle : NEW_ITEM_HANDLE;
    }

    /** Log at debug level, skipping message construction when disabled. */
    private void logDebugMessage(String message)
    {
        if (log.isDebugEnabled())
        {
            log.debug(message);
        }
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.ctask.general;
// above package assignment temporary, pending better asynch release process
// package org.dspace.ctask.integrity;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.curate.AbstractCurationTask;
import org.dspace.curate.Curator;
import org.dspace.curate.Suspendable;
/** ClamScan.java
 *
 * A curation task that scans the bitstreams of an Item's ORIGINAL bundle
 * for viruses by streaming them to a ClamAV daemon (clamd) over a TCP
 * socket, using clamd's NUL-terminated IDSESSION/INSTREAM commands.
 *
 * TODO: add a check for the inputstream size limit
 *
 * @author wbossons
 */
@Suspendable(invoked= Curator.Invoked.INTERACTIVE)
public class ClamScan extends AbstractCurationTask
{
    private static final int DEFAULT_CHUNK_SIZE = 4096;//2048

    // clamd protocol commands; the 'z' prefix selects NUL-terminated framing.
    private static final byte[] INSTREAM = "zINSTREAM\0".getBytes();
    // PING and STATS are declared for protocol completeness but are unused here.
    private static final byte[] PING = "zPING\0".getBytes();
    private static final byte[] STATS = "nSTATS\n".getBytes();//prefix with z
    private static final byte[] IDSESSION = "zIDSESSION\0".getBytes();
    private static final byte[] END = "zEND\0".getBytes();

    private static final String PLUGIN_PREFIX = "clamav";
    private static final String INFECTED_MESSAGE = "had virus detected.";
    private static final String CLEAN_MESSAGE = "had no viruses detected.";
    private static final String CONNECT_FAIL_MESSAGE = "Unable to connect to virus service - check setup";
    private static final String SCAN_FAIL_MESSAGE = "Error encountered using virus service - check setup";
    private static final String NEW_ITEM_HANDLE = "in workflow";

    private static Logger log = Logger.getLogger(ClamScan.class);

    // Connection settings, read from the "clamav" module configuration in init().
    private static String host = null;
    private static int port = 0;
    private static int timeout = 0;
    private static boolean failfast = true;

    private int status = Curator.CURATE_UNSET;
    private List<String> results = null;

    private Socket socket = null;
    private DataOutputStream dataOutputStream = null;

    // Reusable chunk buffer for scan I/O. (Stray second ';' removed.)
    // NOTE(review): static and shared across instances, so concurrent scans
    // would interleave data; appears to assume single-threaded curation -- confirm.
    final static byte[] buffer = new byte[DEFAULT_CHUNK_SIZE];

    /**
     * Initialize the task, reading clamd connection settings
     * (service.host, service.port, socket.timeout, scan.failfast)
     * from the "clamav" module configuration.
     */
    @Override
    public void init(Curator curator, String taskId) throws IOException
    {
        super.init(curator, taskId);
        host = ConfigurationManager.getProperty(PLUGIN_PREFIX, "service.host");
        port = ConfigurationManager.getIntProperty(PLUGIN_PREFIX, "service.port");
        timeout = ConfigurationManager.getIntProperty(PLUGIN_PREFIX, "socket.timeout");
        failfast = ConfigurationManager.getBooleanProperty(PLUGIN_PREFIX, "scan.failfast");
    }

    /**
     * Scan every bitstream in the Item's ORIGINAL bundle; non-Item
     * objects are skipped.
     *
     * @param dso the object to curate
     * @return CURATE_SKIP for non-Items; CURATE_SUCCESS or CURATE_FAIL for
     *         scanned Items; CURATE_ERROR if clamd could not be reached or used
     * @throws IOException wrapping any AuthorizeException/SQLException raised
     *         while retrieving bitstreams
     */
    @Override
    public int perform(DSpaceObject dso) throws IOException
    {
        status = Curator.CURATE_SKIP;
        logDebugMessage("The target dso is " + dso.getName());
        if (dso instanceof Item)
        {
            status = Curator.CURATE_SUCCESS;
            Item item = (Item)dso;
            try
            {
                openSession();
            }
            catch (IOException ioE)
            {
                // no point going further - set result and error out
                closeSession();
                setResult(CONNECT_FAIL_MESSAGE);
                return Curator.CURATE_ERROR;
            }
            try
            {
                // NOTE(review): assumes an ORIGINAL bundle exists; an Item
                // without one would throw ArrayIndexOutOfBoundsException -- confirm.
                Bundle bundle = item.getBundles("ORIGINAL")[0];
                results = new ArrayList<String>();
                for (Bitstream bitstream : bundle.getBitstreams())
                {
                    InputStream inputstream = bitstream.retrieve();
                    logDebugMessage("Scanning " + bitstream.getName() + " . . . ");
                    int bstatus = scan(bitstream, inputstream, getItemHandle(item));
                    inputstream.close();
                    if (bstatus == Curator.CURATE_ERROR)
                    {
                        // no point going further - set result and error out
                        setResult(SCAN_FAIL_MESSAGE);
                        status = bstatus;
                        break;
                    }
                    if (failfast && bstatus == Curator.CURATE_FAIL)
                    {
                        // stop at the first infected bitstream
                        status = bstatus;
                        break;
                    }
                    else if (bstatus == Curator.CURATE_FAIL &&
                             status == Curator.CURATE_SUCCESS)
                    {
                        // remember the failure but keep scanning the rest
                        status = bstatus;
                    }
                }
            }
            catch (AuthorizeException authE)
            {
                throw new IOException(authE.getMessage(), authE);
            }
            catch (SQLException sqlE)
            {
                throw new IOException(sqlE.getMessage(), sqlE);
            }
            finally
            {
                closeSession();
            }
            if (status != Curator.CURATE_ERROR)
            {
                formatResults(item);
            }
        }
        return status;
    }

    /** openSession
     *
     * Connect to clamd, set the socket timeout, and start an IDSESSION.
     *
     * @throws IOException if the connection or session setup fails
     */
    private void openSession() throws IOException
    {
        socket = new Socket();
        try
        {
            logDebugMessage("Connecting to " + host + ":" + port);
            socket.connect(new InetSocketAddress(host, port));
        }
        catch (IOException e)
        {
            log.error("Failed to connect to clamd . . .", e);
            throw e;
        }
        try
        {
            socket.setSoTimeout(timeout);
        }
        catch (SocketException e)
        {
            log.error("Could not set socket timeout . . . " + timeout + "ms", e);
            throw new IOException(e);
        }
        try
        {
            dataOutputStream = new DataOutputStream(socket.getOutputStream());
        }
        catch (IOException e)
        {
            log.error("Failed to open OutputStream . . . ", e);
            throw e;
        }
        try
        {
            dataOutputStream.write(IDSESSION);
        }
        catch (IOException e)
        {
            log.error("Error initiating session with IDSESSION command . . . ", e);
            throw e;
        }
    }

    /** closeSession
     *
     * Close the IDSESSION in CLAMD (best-effort: failures are logged,
     * never propagated).
     */
    private void closeSession()
    {
        if (dataOutputStream != null)
        {
            try
            {
                dataOutputStream.write(END);
            }
            catch (IOException e)
            {
                log.error("Exception closing dataOutputStream", e);
            }
        }
        try
        {
            logDebugMessage("Closing the socket for ClamAv daemon . . . ");
            socket.close();
        }
        catch (IOException e)
        {
            log.error("Exception closing socket", e);
        }
    }

    /** scan
     *
     * Issue the INSTREAM command, stream the bitstream contents to clamd
     * in length-prefixed chunks, and read back the daemon's verdict.
     *
     * @param bitstream the bitstream, for reporting results
     * @param inputstream the InputStream to read
     * @param itemHandle the item handle, for reporting results
     * @return CURATE_SUCCESS if clean, CURATE_FAIL if infected,
     *         CURATE_ERROR on any I/O failure
     */
    private int scan(Bitstream bitstream, InputStream inputstream, String itemHandle)
    {
        try
        {
            dataOutputStream.write(INSTREAM);
        }
        catch (IOException e)
        {
            log.error("Error writing INSTREAM command . . .", e);
            return Curator.CURATE_ERROR;
        }
        // BUG FIX: the previous loop condition (read == DEFAULT_CHUNK_SIZE)
        // stopped at the first short read, silently truncating the scan;
        // InputStream.read may return fewer bytes than requested before EOF.
        int read = 0;
        while (read != -1)
        {
            try
            {
                read = inputstream.read(buffer);
            }
            catch (IOException e)
            {
                log.error("Failed attempting to read the InputStream . . . ", e);
                return Curator.CURATE_ERROR;
            }
            if (read == -1)
            {
                break;
            }
            try
            {
                // writeInt emits the 4-byte big-endian length prefix clamd expects
                dataOutputStream.writeInt(read);
                dataOutputStream.write(buffer, 0, read);
            }
            catch (IOException e)
            {
                log.error("Could not write to the socket . . . ", e);
                return Curator.CURATE_ERROR;
            }
        }
        try
        {
            // a zero-length chunk terminates the INSTREAM transfer
            dataOutputStream.writeInt(0);
            dataOutputStream.flush();
        }
        catch (IOException e)
        {
            log.error("Error writing zero-length chunk to socket", e);
            return Curator.CURATE_ERROR;
        }
        try
        {
            // NOTE(review): a single read may return a partial reply on a
            // slow connection -- confirm clamd responses always arrive whole.
            read = socket.getInputStream().read(buffer);
        }
        catch (IOException e)
        {
            log.error("Error reading result from socket", e);
            return Curator.CURATE_ERROR;
        }
        if (read > 0)
        {
            String response = new String(buffer, 0, read);
            logDebugMessage("Response: " + response);
            if (response.indexOf("FOUND") != -1)
            {
                String itemMsg = "item - " + itemHandle + ": ";
                String bsMsg = "bitstream - " + bitstream.getName() +
                               ": SequenceId - " + bitstream.getSequenceID() + ": infected";
                report(itemMsg + bsMsg);
                results.add(bsMsg);
                return Curator.CURATE_FAIL;
            }
            else
            {
                return Curator.CURATE_SUCCESS;
            }
        }
        return Curator.CURATE_ERROR;
    }

    /**
     * Compose the task's result string from the accumulated per-bitstream
     * results and hand it to setResult().
     *
     * @param item the Item that was scanned
     */
    private void formatResults(Item item) throws IOException
    {
        StringBuilder sb = new StringBuilder();
        sb.append("Item: ").append(getItemHandle(item)).append(" ");
        if (status == Curator.CURATE_FAIL)
        {
            sb.append(INFECTED_MESSAGE);
            int count = 0;
            for (String scanresult : results)
            {
                sb.append("\n").append(scanresult).append("\n");
                count++;
            }
            sb.append(count).append(" virus(es) found. ")
              .append(" failfast: ").append(failfast);
        }
        else
        {
            sb.append(CLEAN_MESSAGE);
        }
        setResult(sb.toString());
    }

    /**
     * The item's handle, or the NEW_ITEM_HANDLE placeholder when the item
     * has none yet (i.e. it is still in workflow).
     */
    private static String getItemHandle(Item item)
    {
        String handle = item.getHandle();
        return (handle != null) ? handle : NEW_ITEM_HANDLE;
    }

    /** Log at debug level, skipping message construction when disabled. */
    private void logDebugMessage(String message)
    {
        if (log.isDebugEnabled())
        {
            log.debug(message);
        }
    }
}

View File

@@ -1,421 +1,421 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.harvest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.storage.rdbms.TableRow;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
/**
 * Database-backed record of the OAI harvesting settings and status for a
 * single DSpace collection (one row of the harvested_collection table).
 *
 * @author Alexey Maslov
 */
public class HarvestedCollection
{
    private Context context;
    private TableRow harvestRow;

    /** Harvest levels stored in the harvest_type column. */
    public static final int TYPE_NONE = 0;
    public static final int TYPE_DMD = 1;
    public static final int TYPE_DMDREF = 2;
    public static final int TYPE_FULL = 3;

    /** Status codes stored in the harvest_status column. */
    public static final int STATUS_READY = 0;
    public static final int STATUS_BUSY = 1;
    public static final int STATUS_QUEUED = 2;
    public static final int STATUS_OAI_ERROR = 3;
    public static final int STATUS_UNKNOWN_ERROR = -1;

    /*
     * collection_id | integer | not null
       harvest_type | integer |
       oai_source | text |
       oai_set_id | text |
       harvest_message | text |
       metadata_config_id | text |
       harvest_status | integer |
       harvest_start_time | timestamp with time zone |
     */

    // TODO: make sure this guy knows to lock people out if the status is not zero.
    // i.e. someone editing a collection's setting from the admin menu should have
    // to stop an ongoing harvest before they can edit the settings.
    HarvestedCollection(Context c, TableRow row)
    {
        context = c;
        harvestRow = row;
    }

    /**
     * Probe that the harvested_collection table exists; an SQLException
     * propagates if it does not.
     */
    public static void exists(Context c) throws SQLException {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection", "SELECT COUNT(*) FROM harvested_collection");
        // BUG FIX: close the iterator so the underlying statement is released.
        tri.close();
    }

    /**
     * Find the harvest settings corresponding to this collection
     * @return a HarvestInstance object corresponding to this collection's settings, null if not found.
     */
    public static HarvestedCollection find(Context c, int collectionId) throws SQLException
    {
        TableRow row = DatabaseManager.findByUnique(c, "harvested_collection", "collection_id", collectionId);
        if (row == null) {
            return null;
        }
        return new HarvestedCollection(c, row);
    }

    /**
     * Create a new harvest instance row for a specified collection.
     * The new row defaults to harvest_type TYPE_NONE.
     * @return a new HarvestInstance object
     */
    public static HarvestedCollection create(Context c, int collectionId) throws SQLException {
        TableRow row = DatabaseManager.row("harvested_collection");
        row.setColumn("collection_id", collectionId);
        row.setColumn("harvest_type", 0);
        DatabaseManager.insert(c, row);
        return new HarvestedCollection(c, row);
    }

    /** Returns whether the specified collection is harvestable, i.e. whether its harvesting
     * options are set up correctly. This is distinct from "ready", since this collection may
     * be in process of being harvested.
     */
    public static boolean isHarvestable(Context c, int collectionId) throws SQLException
    {
        HarvestedCollection hc = HarvestedCollection.find(c, collectionId);
        return hc != null && hc.isHarvestable();
    }

    /** Returns whether this harvest instance is actually harvestable, i.e. whether its settings
     * options are set up correctly. This is distinct from "ready", since this collection may
     * be in process of being harvested.
     */
    public boolean isHarvestable() throws SQLException
    {
        return this.getHarvestType() > 0 && this.getOaiSource() != null && this.getOaiSetId() != null &&
               this.getHarvestStatus() != HarvestedCollection.STATUS_UNKNOWN_ERROR;
    }

    /** Returns whether the specified collection is ready for immediate harvest.
     */
    public static boolean isReady(Context c, int collectionId) throws SQLException
    {
        HarvestedCollection hc = HarvestedCollection.find(c, collectionId);
        // BUG FIX: a collection with no harvest settings row is never ready
        // (previously this dereferenced null and threw NullPointerException).
        return hc != null && hc.isReady();
    }

    /** Returns whether this harvest instance is ready for immediate harvest:
     * harvestable and currently in the READY or OAI_ERROR state.
     */
    public boolean isReady() throws SQLException
    {
        return this.isHarvestable() &&
               (this.getHarvestStatus() == HarvestedCollection.STATUS_READY ||
                this.getHarvestStatus() == HarvestedCollection.STATUS_OAI_ERROR);
    }

    /** Find all collections that are set up for harvesting
     *
     * return: list of collection id's
     * @throws SQLException
     */
    public static List<Integer> findAll(Context c) throws SQLException
    {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                "SELECT * FROM harvested_collection");
        List<Integer> collectionIds = new ArrayList<Integer>();
        // BUG FIX: close the iterator in a finally block to avoid leaking
        // the underlying statement/result set.
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                collectionIds.add(row.getIntColumn("collection_id"));
            }
        }
        finally
        {
            tri.close();
        }
        return collectionIds;
    }

    /** Find all collections that are ready for harvesting
     *
     * return: list of collection id's
     * @throws SQLException
     */
    public static List<Integer> findReady(Context c) throws SQLException
    {
        // Minutes between harvests of the same collection (default 12 hours).
        int harvestInterval = ConfigurationManager.getIntProperty("oai", "harvester.harvestFrequency");
        if (harvestInterval == 0)
        {
            harvestInterval = 720;
        }
        // Hours after which a BUSY harvest is considered stuck and reclaimable.
        int expirationInterval = ConfigurationManager.getIntProperty("oai", "harvester.threadTimeout");
        if (expirationInterval == 0)
        {
            expirationInterval = 24;
        }
        Date startTime;
        Date expirationTime;
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(new Date());
        calendar.add(Calendar.MINUTE, -1 * harvestInterval);
        startTime = calendar.getTime();
        calendar.setTime(startTime);
        calendar.add(Calendar.HOUR, -2 * expirationInterval);
        expirationTime = calendar.getTime();
        /* Select all collections whose last_harvest is before our start time, whose harvest_type *is not* 0 and whose status *is* 0 (available) or 3 (OAI Error). */
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                "SELECT * FROM harvested_collection WHERE (last_harvested < ? or last_harvested is null) and harvest_type > ? and (harvest_status = ? or harvest_status = ? or (harvest_status=? and harvest_start_time < ?)) ORDER BY last_harvested",
                new java.sql.Timestamp(startTime.getTime()), 0, HarvestedCollection.STATUS_READY, HarvestedCollection.STATUS_OAI_ERROR, HarvestedCollection.STATUS_BUSY, new java.sql.Timestamp(expirationTime.getTime()));
        List<Integer> collectionIds = new ArrayList<Integer>();
        // BUG FIX: close the iterator in a finally block to avoid a resource leak.
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                collectionIds.add(row.getIntColumn("collection_id"));
            }
        }
        finally
        {
            tri.close();
        }
        return collectionIds;
    }

    /**
     * Find all collections with the specified status flag
     * @param c
     * @param status see HarvestInstance.STATUS_...
     * @return list of collection ids with that status
     * @throws SQLException
     */
    public static List<Integer> findByStatus(Context c, int status) throws SQLException {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                "SELECT * FROM harvested_collection WHERE harvest_status = ?", status);
        List<Integer> collectionIds = new ArrayList<Integer>();
        // BUG FIX: close the iterator in a finally block to avoid a resource leak.
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                collectionIds.add(row.getIntColumn("collection_id"));
            }
        }
        finally
        {
            tri.close();
        }
        return collectionIds;
    }

    /** Find the collection that was harvested the longest time ago.
     * @return the collection id, or -1 if no eligible collection exists
     * @throws SQLException
     */
    public static Integer findOldestHarvest (Context c) throws SQLException {
        String query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? order by last_harvested asc limit 1";
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            // Oracle has no LIMIT clause; emulated with ROWNUM.
            // NOTE(review): ROWNUM is applied before ORDER BY, so this picks an
            // arbitrary row rather than the oldest -- confirm and fix via subquery.
            query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? and rownum <= 1 order by last_harvested asc";
        }
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                query, 0, 0);
        // BUG FIX: guard the empty result and close the iterator.
        try
        {
            TableRow row = tri.hasNext() ? tri.next() : null;
            if (row != null)
            {
                return row.getIntColumn("collection_id");
            }
            else
            {
                return -1;
            }
        }
        finally
        {
            tri.close();
        }
    }

    /** Find the collection that was harvested most recently.
     * @return the collection id, or -1 if no eligible collection exists
     * @throws SQLException
     */
    public static Integer findNewestHarvest (Context c) throws SQLException {
        String query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? order by last_harvested desc limit 1";
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            // Oracle has no LIMIT clause; emulated with ROWNUM.
            // NOTE(review): ROWNUM is applied before ORDER BY, so this picks an
            // arbitrary row rather than the newest -- confirm and fix via subquery.
            query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? and rownum <= 1 order by last_harvested desc";
        }
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                query , 0, 0);
        // BUG FIX: guard the empty result and close the iterator.
        try
        {
            TableRow row = tri.hasNext() ? tri.next() : null;
            if (row != null)
            {
                return row.getIntColumn("collection_id");
            }
            else
            {
                return -1;
            }
        }
        finally
        {
            tri.close();
        }
    }

    /**
     * A function to set all harvesting-related parameters at once
     */
    public void setHarvestParams(int type, String oaiSource, String oaiSetId, String mdConfigId) {
        setHarvestType(type);
        setOaiSource(oaiSource);
        setOaiSetId(oaiSetId);
        setHarvestMetadataConfig(mdConfigId);
    }

    /* Setters for the appropriate harvesting-related columns */
    public void setHarvestType(int type) {
        harvestRow.setColumn("harvest_type",type);
    }

    /**
     * Sets the current status of the collection.
     *
     * @param status a HarvestInstance.STATUS_... constant
     */
    public void setHarvestStatus(int status) {
        harvestRow.setColumn("harvest_status",status);
    }

    /** Set the OAI provider URL; null or empty clears the column. */
    public void setOaiSource(String oaiSource) {
        if (oaiSource == null || oaiSource.length() == 0) {
            harvestRow.setColumnNull("oai_source");
        }
        else {
            harvestRow.setColumn("oai_source",oaiSource);
        }
    }

    /** Set the OAI set identifier; null or empty clears the column. */
    public void setOaiSetId(String oaiSetId) {
        if (oaiSetId == null || oaiSetId.length() == 0) {
            harvestRow.setColumnNull("oai_set_id");
        }
        else {
            harvestRow.setColumn("oai_set_id",oaiSetId);
        }
    }

    /** Set the metadata configuration id; null or empty clears the column. */
    public void setHarvestMetadataConfig(String mdConfigId) {
        if (mdConfigId == null || mdConfigId.length() == 0) {
            harvestRow.setColumnNull("metadata_config_id");
        }
        else {
            harvestRow.setColumn("metadata_config_id",mdConfigId);
        }
    }

    /** Record the outcome of a harvest run: its timestamp and message. */
    public void setHarvestResult(Date date, String message) {
        if (date == null) {
            harvestRow.setColumnNull("last_harvested");
        } else {
            harvestRow.setColumn("last_harvested", date);
        }
        if (message == null || message.length() == 0) {
            harvestRow.setColumnNull("harvest_message");
        } else {
            harvestRow.setColumn("harvest_message", message);
        }
    }

    /** Set the harvest message; null or empty clears the column. */
    public void setHarvestMessage(String message) {
        if (message == null || message.length() == 0) {
            harvestRow.setColumnNull("harvest_message");
        } else {
            harvestRow.setColumn("harvest_message", message);
        }
    }

    /** Set the harvest start time; null clears the column. */
    public void setHarvestStartTime(Date date) {
        if (date == null) {
            harvestRow.setColumnNull("harvest_start_time");
        } else {
            harvestRow.setColumn("harvest_start_time", date);
        }
    }

    /* Getters for the appropriate harvesting-related columns */
    public int getCollectionId() {
        return harvestRow.getIntColumn("collection_id");
    }

    public int getHarvestType() {
        return harvestRow.getIntColumn("harvest_type");
    }

    public int getHarvestStatus() {
        return harvestRow.getIntColumn("harvest_status");
    }

    public String getOaiSource() {
        return harvestRow.getStringColumn("oai_source");
    }

    public String getOaiSetId() {
        return harvestRow.getStringColumn("oai_set_id");
    }

    public String getHarvestMetadataConfig() {
        return harvestRow.getStringColumn("metadata_config_id");
    }

    public String getHarvestMessage() {
        return harvestRow.getStringColumn("harvest_message");
    }

    public Date getHarvestDate() {
        return harvestRow.getDateColumn("last_harvested");
    }

    public Date getHarvestStartTime() {
        return harvestRow.getDateColumn("harvest_start_time");
    }

    /** Delete this collection's harvest settings row. */
    public void delete() throws SQLException {
        DatabaseManager.delete(context, harvestRow);
    }

    /** Persist any pending column changes to the database. */
    public void update() throws SQLException, IOException, AuthorizeException
    {
        DatabaseManager.update(context, harvestRow);
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.harvest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.storage.rdbms.TableRow;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
/**
* @author Alexey Maslov
*/
public class HarvestedCollection
{
    /** DSpace context this instance operates in. */
    private Context context;

    /** Database row (harvested_collection) backing this configuration. */
    private TableRow harvestRow;

    /** Harvest type: harvesting disabled for this collection. */
    public static final int TYPE_NONE = 0;
    /** Harvest type: descriptive metadata only (see DSpace harvesting docs). */
    public static final int TYPE_DMD = 1;
    /** Harvest type: metadata plus references to bitstreams (see DSpace harvesting docs). */
    public static final int TYPE_DMDREF = 2;
    /** Harvest type: metadata and full content (see DSpace harvesting docs). */
    public static final int TYPE_FULL = 3;

    /** Status: available for harvesting. */
    public static final int STATUS_READY = 0;
    /** Status: a harvest is currently running. */
    public static final int STATUS_BUSY = 1;
    /** Status: queued for harvesting. */
    public static final int STATUS_QUEUED = 2;
    /** Status: last harvest ended with an OAI error. */
    public static final int STATUS_OAI_ERROR = 3;
    /** Status: last harvest ended with an unknown error. */
    public static final int STATUS_UNKNOWN_ERROR = -1;

    /*
     * Table layout (harvested_collection):
     *   collection_id      | integer | not null
     *   harvest_type       | integer |
     *   oai_source         | text    |
     *   oai_set_id         | text    |
     *   harvest_message    | text    |
     *   metadata_config_id | text    |
     *   harvest_status     | integer |
     *   harvest_start_time | timestamp with time zone |
     */

    // TODO: make sure this guy knows to lock people out if the status is not zero.
    // i.e. someone editing a collection's setting from the admin menu should have
    // to stop an ongoing harvest before they can edit the settings.

    /** Wrap an existing harvested_collection row. */
    HarvestedCollection(Context c, TableRow row)
    {
        context = c;
        harvestRow = row;
    }

    /**
     * Probe the harvested_collection table; throws SQLException if it is
     * missing or unreadable.
     */
    public static void exists(Context c) throws SQLException {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                "SELECT COUNT(*) FROM harvested_collection");
        // Close the iterator so the underlying statement/result set is released.
        if (tri != null)
        {
            tri.close();
        }
    }

    /**
     * Find the harvest settings corresponding to this collection
     * @return a HarvestInstance object corresponding to this collection's settings, null if not found.
     */
    public static HarvestedCollection find(Context c, int collectionId) throws SQLException
    {
        TableRow row = DatabaseManager.findByUnique(c, "harvested_collection", "collection_id", collectionId);
        if (row == null) {
            return null;
        }
        return new HarvestedCollection(c, row);
    }

    /**
     * Create a new harvest instance row for a specified collection.
     * The new row starts with harvest_type = TYPE_NONE.
     * @return a new HarvestInstance object
     */
    public static HarvestedCollection create(Context c, int collectionId) throws SQLException {
        TableRow row = DatabaseManager.row("harvested_collection");
        row.setColumn("collection_id", collectionId);
        row.setColumn("harvest_type", TYPE_NONE);
        DatabaseManager.insert(c, row);
        return new HarvestedCollection(c, row);
    }

    /** Returns whether the specified collection is harvestable, i.e. whether its harvesting
     * options are set up correctly. This is distinct from "ready", since this collection may
     * be in process of being harvested.
     */
    public static boolean isHarvestable(Context c, int collectionId) throws SQLException
    {
        HarvestedCollection hc = HarvestedCollection.find(c, collectionId);
        return hc != null && hc.isHarvestable();
    }

    /** Returns whether this harvest instance is actually harvestable, i.e. whether its settings
     * options are set up correctly. This is distinct from "ready", since this collection may
     * be in process of being harvested.
     */
    public boolean isHarvestable() throws SQLException
    {
        return this.getHarvestType() > 0 && this.getOaiSource() != null
                && this.getOaiSetId() != null
                && this.getHarvestStatus() != HarvestedCollection.STATUS_UNKNOWN_ERROR;
    }

    /** Returns whether the specified collection is ready for immediate harvest.
     * Returns false (rather than throwing NPE) when the collection has no
     * harvest settings row at all.
     */
    public static boolean isReady(Context c, int collectionId) throws SQLException
    {
        HarvestedCollection hc = HarvestedCollection.find(c, collectionId);
        return hc != null && hc.isReady();
    }

    /** Returns whether this harvest instance is ready for immediate harvest:
     * correctly configured and not currently busy/queued.
     */
    public boolean isReady() throws SQLException
    {
        return this.isHarvestable()
                && (this.getHarvestStatus() == HarvestedCollection.STATUS_READY
                        || this.getHarvestStatus() == HarvestedCollection.STATUS_OAI_ERROR);
    }

    /** Find all collections that are set up for harvesting
     *
     * @return list of collection ids
     * @throws SQLException
     */
    public static List<Integer> findAll(Context c) throws SQLException
    {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                "SELECT * FROM harvested_collection");
        List<Integer> collectionIds = new ArrayList<Integer>();
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                collectionIds.add(row.getIntColumn("collection_id"));
            }
        }
        finally
        {
            // Release the statement/result set backing the iterator.
            tri.close();
        }
        return collectionIds;
    }

    /** Find all collections that are ready for harvesting
     *
     * @return list of collection ids
     * @throws SQLException
     */
    public static List<Integer> findReady(Context c) throws SQLException
    {
        // Minimum minutes between harvests of the same collection (default 12 hours).
        int harvestInterval = ConfigurationManager.getIntProperty("oai", "harvester.harvestFrequency");
        if (harvestInterval == 0)
        {
            harvestInterval = 720;
        }

        // Hours after which a BUSY harvest is considered expired/stuck (default 24).
        int expirationInterval = ConfigurationManager.getIntProperty("oai", "harvester.threadTimeout");
        if (expirationInterval == 0)
        {
            expirationInterval = 24;
        }

        Date startTime;
        Date expirationTime;
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(new Date());
        calendar.add(Calendar.MINUTE, -1 * harvestInterval);
        startTime = calendar.getTime();
        calendar.setTime(startTime);
        calendar.add(Calendar.HOUR, -2 * expirationInterval);
        expirationTime = calendar.getTime();

        /* Select all collections whose last_harvest is before our start time, whose harvest_type *is not* 0 and whose status *is* 0 (available) or 3 (OAI Error). */
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                "SELECT * FROM harvested_collection WHERE (last_harvested < ? or last_harvested is null) and harvest_type > ? and (harvest_status = ? or harvest_status = ? or (harvest_status=? and harvest_start_time < ?)) ORDER BY last_harvested",
                new java.sql.Timestamp(startTime.getTime()), 0, HarvestedCollection.STATUS_READY, HarvestedCollection.STATUS_OAI_ERROR, HarvestedCollection.STATUS_BUSY, new java.sql.Timestamp(expirationTime.getTime()));
        List<Integer> collectionIds = new ArrayList<Integer>();
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                collectionIds.add(row.getIntColumn("collection_id"));
            }
        }
        finally
        {
            tri.close();
        }
        return collectionIds;
    }

    /**
     * Find all collections with the specified status flag
     * @param c
     * @param status see HarvestInstance.STATUS_...
     * @return list of collection ids
     * @throws SQLException
     */
    public static List<Integer> findByStatus(Context c, int status) throws SQLException {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                "SELECT * FROM harvested_collection WHERE harvest_status = ?", status);
        List<Integer> collectionIds = new ArrayList<Integer>();
        try
        {
            while (tri.hasNext())
            {
                TableRow row = tri.next();
                collectionIds.add(row.getIntColumn("collection_id"));
            }
        }
        finally
        {
            tri.close();
        }
        return collectionIds;
    }

    /** Find the collection that was harvested the longest time ago.
     * @return the collection id, or -1 if none matched
     * @throws SQLException
     */
    public static Integer findOldestHarvest (Context c) throws SQLException {
        String query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? order by last_harvested asc limit 1";
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            // Oracle has no LIMIT; use rownum instead.
            query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? and rownum <= 1 order by last_harvested asc";
        }
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                query, 0, 0);
        try
        {
            // Guard with hasNext() so an empty result cannot blow up in next().
            TableRow row = tri.hasNext() ? tri.next() : null;
            if (row != null)
            {
                return row.getIntColumn("collection_id");
            }
            return -1;
        }
        finally
        {
            tri.close();
        }
    }

    /** Find the collection that was harvested most recently.
     * @return the collection id, or -1 if none matched
     * @throws SQLException
     */
    public static Integer findNewestHarvest (Context c) throws SQLException {
        String query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? order by last_harvested desc limit 1";
        if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
        {
            // Oracle has no LIMIT; use rownum instead.
            query = "select collection_id from harvested_collection where harvest_type > ? and harvest_status = ? and rownum <= 1 order by last_harvested desc";
        }
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_collection",
                query , 0, 0);
        try
        {
            TableRow row = tri.hasNext() ? tri.next() : null;
            if (row != null)
            {
                return row.getIntColumn("collection_id");
            }
            return -1;
        }
        finally
        {
            tri.close();
        }
    }

    /**
     * A function to set all harvesting-related parameters at once
     */
    public void setHarvestParams(int type, String oaiSource, String oaiSetId, String mdConfigId) {
        setHarvestType(type);
        setOaiSource(oaiSource);
        setOaiSetId(oaiSetId);
        setHarvestMetadataConfig(mdConfigId);
    }

    /* Setters for the appropriate harvesting-related columns */

    /** Set the harvest type; one of the TYPE_... constants. */
    public void setHarvestType(int type) {
        harvestRow.setColumn("harvest_type",type);
    }

    /**
     * Sets the current status of the collection.
     *
     * @param status a HarvestInstance.STATUS_... constant
     */
    public void setHarvestStatus(int status) {
        harvestRow.setColumn("harvest_status",status);
    }

    /** Set the OAI provider URL; null/empty clears the column. */
    public void setOaiSource(String oaiSource) {
        if (oaiSource == null || oaiSource.length() == 0) {
            harvestRow.setColumnNull("oai_source");
        }
        else {
            harvestRow.setColumn("oai_source",oaiSource);
        }
    }

    /** Set the OAI set id to harvest; null/empty clears the column. */
    public void setOaiSetId(String oaiSetId) {
        if (oaiSetId == null || oaiSetId.length() == 0) {
            harvestRow.setColumnNull("oai_set_id");
        }
        else {
            harvestRow.setColumn("oai_set_id",oaiSetId);
        }
    }

    /** Set the metadata configuration id; null/empty clears the column. */
    public void setHarvestMetadataConfig(String mdConfigId) {
        if (mdConfigId == null || mdConfigId.length() == 0) {
            harvestRow.setColumnNull("metadata_config_id");
        }
        else {
            harvestRow.setColumn("metadata_config_id",mdConfigId);
        }
    }

    /** Record the outcome of a harvest run: timestamp and message (null clears). */
    public void setHarvestResult(Date date, String message) {
        if (date == null) {
            harvestRow.setColumnNull("last_harvested");
        } else {
            harvestRow.setColumn("last_harvested", date);
        }
        if (message == null || message.length() == 0) {
            harvestRow.setColumnNull("harvest_message");
        } else {
            harvestRow.setColumn("harvest_message", message);
        }
    }

    /** Set the harvest message; null/empty clears the column. */
    public void setHarvestMessage(String message) {
        if (message == null || message.length() == 0) {
            harvestRow.setColumnNull("harvest_message");
        } else {
            harvestRow.setColumn("harvest_message", message);
        }
    }

    /** Set the harvest start time; null clears the column. */
    public void setHarvestStartTime(Date date) {
        if (date == null) {
            harvestRow.setColumnNull("harvest_start_time");
        } else {
            harvestRow.setColumn("harvest_start_time", date);
        }
    }

    /* Getters for the appropriate harvesting-related columns */

    public int getCollectionId() {
        return harvestRow.getIntColumn("collection_id");
    }

    public int getHarvestType() {
        return harvestRow.getIntColumn("harvest_type");
    }

    public int getHarvestStatus() {
        return harvestRow.getIntColumn("harvest_status");
    }

    public String getOaiSource() {
        return harvestRow.getStringColumn("oai_source");
    }

    public String getOaiSetId() {
        return harvestRow.getStringColumn("oai_set_id");
    }

    public String getHarvestMetadataConfig() {
        return harvestRow.getStringColumn("metadata_config_id");
    }

    public String getHarvestMessage() {
        return harvestRow.getStringColumn("harvest_message");
    }

    public Date getHarvestDate() {
        return harvestRow.getDateColumn("last_harvested");
    }

    public Date getHarvestStartTime() {
        return harvestRow.getDateColumn("harvest_start_time");
    }

    /** Delete this harvest-settings row from the database. */
    public void delete() throws SQLException {
        DatabaseManager.delete(context, harvestRow);
    }

    /** Persist any pending column changes of this row to the database. */
    public void update() throws SQLException, IOException, AuthorizeException
    {
        DatabaseManager.update(context, harvestRow);
    }
}

View File

@@ -1,171 +1,171 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.harvest;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* @author Alexey Maslov
*/
public class HarvestedItem
{
    /** DSpace context this instance operates in. */
    private Context context;

    /** Database row (harvested_item) backing this record. */
    private TableRow harvestRow;

    /** Wrap an existing harvested_item row. */
    HarvestedItem(Context c, TableRow row)
    {
        context = c;
        harvestRow = row;
    }

    /**
     * Probe the harvested_item table; throws SQLException if it is missing
     * or unreadable.
     */
    public static void exists(Context c) throws SQLException {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_item",
                "SELECT COUNT(*) FROM harvested_item");
        // Close the iterator so the underlying statement/result set is released.
        if (tri != null)
        {
            tri.close();
        }
    }

    /**
     * Find the harvest parameters corresponding to the specified DSpace item
     * @return a HarvestedItem object corresponding to this item, null if not found.
     */
    public static HarvestedItem find(Context c, int item_id) throws SQLException
    {
        TableRow row = DatabaseManager.findByUnique(c, "harvested_item", "item_id", item_id);
        if (row == null) {
            return null;
        }
        return new HarvestedItem(c, row);
    }

    /*
     * select foo.item_id from (select item.item_id, item.owning_collection from item join item2bundle on item.item_id=item2bundle.item_id where item2bundle.bundle_id=22) as foo join collection on foo.owning_collection=collection.collection_id where collection.collection_id=5;
     */

    /**
     * Retrieve a DSpace Item that corresponds to this particular combination of owning collection and OAI ID.
     * @param context
     * @param itemOaiID the string used by the OAI-PMH provider to identify the item
     * @param collectionID id of the local collection that the item should be found in
     * @return DSpace Item or null if no item was found
     */
    public static Item getItemByOAIId(Context context, String itemOaiID, int collectionID) throws SQLException
    {
        /*
         * FYI: This method has to be scoped to a collection. Otherwise, we could have collisions as more
         * than one collection might be importing the same item. That is OAI_ID's might be unique to the
         * provider but not to the harvester.
         */
        Item resolvedItem = null;
        TableRowIterator tri = null;
        final String selectItemFromOaiId = "SELECT dsi.item_id FROM " +
            "(SELECT item.item_id, item.owning_collection FROM item JOIN harvested_item ON item.item_id=harvested_item.item_id WHERE harvested_item.oai_id=?) " +
            "dsi JOIN collection ON dsi.owning_collection=collection.collection_id WHERE collection.collection_id=?";
        try
        {
            tri = DatabaseManager.query(context, selectItemFromOaiId, itemOaiID, collectionID);
            if (tri.hasNext())
            {
                TableRow row = tri.next();
                int itemID = row.getIntColumn("item_id");
                resolvedItem = Item.find(context, itemID);
            }
            else {
                return null;
            }
        }
        finally {
            if (tri != null)
            {
                tri.close();
            }
        }
        return resolvedItem;
    }

    /**
     * Create a new harvested item row for a specified item id.
     * @return a new HarvestedItem object
     */
    public static HarvestedItem create(Context c, int itemId, String itemOAIid) throws SQLException {
        TableRow row = DatabaseManager.row("harvested_item");
        row.setColumn("item_id", itemId);
        row.setColumn("oai_id", itemOAIid);
        DatabaseManager.insert(c, row);
        return new HarvestedItem(c, row);
    }

    /**
     * Get the id of the item this harvest record refers to, as a String.
     */
    public String getItemID()
    {
        // item_id is stored as an integer column (see create()), so it must be
        // read with getIntColumn and converted; reading it as a string column
        // cannot yield the id.
        return String.valueOf(harvestRow.getIntColumn("item_id"));
    }

    /**
     * Get the oai_id associated with this item
     */
    public String getOaiID()
    {
        String oai_id = harvestRow.getStringColumn("oai_id");
        return oai_id;
    }

    /**
     * Set the oai_id associated with this item
     */
    public void setOaiID(String itemOaiID)
    {
        harvestRow.setColumn("oai_id",itemOaiID);
    }

    /** Record the time this item was harvested; null means "now". */
    public void setHarvestDate(Date date) {
        if (date == null) {
            date = new Date();
        }
        harvestRow.setColumn("last_harvested", date);
    }

    /** @return the time this item was last harvested */
    public Date getHarvestDate() {
        return harvestRow.getDateColumn("last_harvested");
    }

    /** Delete this harvested-item row from the database. */
    public void delete() throws SQLException {
        DatabaseManager.delete(context, harvestRow);
    }

    /** Persist any pending column changes of this row to the database. */
    public void update() throws SQLException, IOException, AuthorizeException {
        DatabaseManager.update(context, harvestRow);
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.harvest;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
/**
* @author Alexey Maslov
*/
public class HarvestedItem
{
    /** DSpace context this instance operates in. */
    private Context context;

    /** Database row (harvested_item) backing this record. */
    private TableRow harvestRow;

    /** Wrap an existing harvested_item row. */
    HarvestedItem(Context c, TableRow row)
    {
        context = c;
        harvestRow = row;
    }

    /**
     * Probe the harvested_item table; throws SQLException if it is missing
     * or unreadable.
     */
    public static void exists(Context c) throws SQLException {
        TableRowIterator tri = DatabaseManager.queryTable(c, "harvested_item",
                "SELECT COUNT(*) FROM harvested_item");
        // Close the iterator so the underlying statement/result set is released.
        if (tri != null)
        {
            tri.close();
        }
    }

    /**
     * Find the harvest parameters corresponding to the specified DSpace item
     * @return a HarvestedItem object corresponding to this item, null if not found.
     */
    public static HarvestedItem find(Context c, int item_id) throws SQLException
    {
        TableRow row = DatabaseManager.findByUnique(c, "harvested_item", "item_id", item_id);
        if (row == null) {
            return null;
        }
        return new HarvestedItem(c, row);
    }

    /*
     * select foo.item_id from (select item.item_id, item.owning_collection from item join item2bundle on item.item_id=item2bundle.item_id where item2bundle.bundle_id=22) as foo join collection on foo.owning_collection=collection.collection_id where collection.collection_id=5;
     */

    /**
     * Retrieve a DSpace Item that corresponds to this particular combination of owning collection and OAI ID.
     * @param context
     * @param itemOaiID the string used by the OAI-PMH provider to identify the item
     * @param collectionID id of the local collection that the item should be found in
     * @return DSpace Item or null if no item was found
     */
    public static Item getItemByOAIId(Context context, String itemOaiID, int collectionID) throws SQLException
    {
        /*
         * FYI: This method has to be scoped to a collection. Otherwise, we could have collisions as more
         * than one collection might be importing the same item. That is OAI_ID's might be unique to the
         * provider but not to the harvester.
         */
        Item resolvedItem = null;
        TableRowIterator tri = null;
        final String selectItemFromOaiId = "SELECT dsi.item_id FROM " +
            "(SELECT item.item_id, item.owning_collection FROM item JOIN harvested_item ON item.item_id=harvested_item.item_id WHERE harvested_item.oai_id=?) " +
            "dsi JOIN collection ON dsi.owning_collection=collection.collection_id WHERE collection.collection_id=?";
        try
        {
            tri = DatabaseManager.query(context, selectItemFromOaiId, itemOaiID, collectionID);
            if (tri.hasNext())
            {
                TableRow row = tri.next();
                int itemID = row.getIntColumn("item_id");
                resolvedItem = Item.find(context, itemID);
            }
            else {
                return null;
            }
        }
        finally {
            if (tri != null)
            {
                tri.close();
            }
        }
        return resolvedItem;
    }

    /**
     * Create a new harvested item row for a specified item id.
     * @return a new HarvestedItem object
     */
    public static HarvestedItem create(Context c, int itemId, String itemOAIid) throws SQLException {
        TableRow row = DatabaseManager.row("harvested_item");
        row.setColumn("item_id", itemId);
        row.setColumn("oai_id", itemOAIid);
        DatabaseManager.insert(c, row);
        return new HarvestedItem(c, row);
    }

    /**
     * Get the id of the item this harvest record refers to, as a String.
     */
    public String getItemID()
    {
        // item_id is stored as an integer column (see create()), so it must be
        // read with getIntColumn and converted; reading it as a string column
        // cannot yield the id.
        return String.valueOf(harvestRow.getIntColumn("item_id"));
    }

    /**
     * Get the oai_id associated with this item
     */
    public String getOaiID()
    {
        String oai_id = harvestRow.getStringColumn("oai_id");
        return oai_id;
    }

    /**
     * Set the oai_id associated with this item
     */
    public void setOaiID(String itemOaiID)
    {
        harvestRow.setColumn("oai_id",itemOaiID);
    }

    /** Record the time this item was harvested; null means "now". */
    public void setHarvestDate(Date date) {
        if (date == null) {
            date = new Date();
        }
        harvestRow.setColumn("last_harvested", date);
    }

    /** @return the time this item was last harvested */
    public Date getHarvestDate() {
        return harvestRow.getDateColumn("last_harvested");
    }

    /** Delete this harvested-item row from the database. */
    public void delete() throws SQLException {
        DatabaseManager.delete(context, harvestRow);
    }

    /** Persist any pending column changes of this row to the database. */
    public void update() throws SQLException, IOException, AuthorizeException {
        DatabaseManager.update(context, harvestRow);
    }
}

View File

@@ -1,49 +1,49 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.search;
import java.io.Reader;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.util.Version;
/**
* Custom Lucene Analyzer that combines the standard filter, lowercase filter
* and stopword filter. Intentionally omits the stemming filter (which is used
* by DSAnalyzer)
*/
public class DSNonStemmingAnalyzer extends DSAnalyzer
{
    /**
     * Build a non-stemming analyzer for the given Lucene version.
     *
     * @param matchVersion Lucene version to match
     */
    public DSNonStemmingAnalyzer(Version matchVersion) {
        super(matchVersion);
    }

    /**
     * Assemble the token-stream pipeline for this analyzer: tokenize, apply
     * the standard filter, lowercase, then strip stopwords. This mirrors
     * DSAnalyzer's pipeline except that the stemming filter is deliberately
     * omitted.
     */
    @Override
    protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        final Tokenizer tokenizer = new DSTokenizer(matchVersion, reader);
        TokenStream pipeline = new StandardFilter(matchVersion, tokenizer);
        pipeline = new LowerCaseFilter(matchVersion, pipeline);
        pipeline = new StopFilter(matchVersion, pipeline, stopSet);
        return new TokenStreamComponents(tokenizer, pipeline);
    }
}
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.search;
import java.io.Reader;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.util.Version;
/**
* Custom Lucene Analyzer that combines the standard filter, lowercase filter
* and stopword filter. Intentionally omits the stemming filter (which is used
* by DSAnalyzer)
*/
public class DSNonStemmingAnalyzer extends DSAnalyzer
{
    /**
     * Build a non-stemming analyzer for the given Lucene version.
     *
     * @param matchVersion Lucene version to match
     */
    public DSNonStemmingAnalyzer(Version matchVersion) {
        super(matchVersion);
    }

    /**
     * Assemble the token-stream pipeline for this analyzer: tokenize, apply
     * the standard filter, lowercase, then strip stopwords. This mirrors
     * DSAnalyzer's pipeline except that the stemming filter is deliberately
     * omitted.
     */
    @Override
    protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        final Tokenizer tokenizer = new DSTokenizer(matchVersion, reader);
        TokenStream pipeline = new StandardFilter(matchVersion, tokenizer);
        pipeline = new LowerCaseFilter(matchVersion, pipeline);
        pipeline = new StopFilter(matchVersion, pipeline, stopSet);
        return new TokenStreamComponents(tokenizer, pipeline);
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,22 +1,22 @@
#---------------------------------------------------------------#
#--------------SWORD V.1 CLIENT CONFIGURATIONS------------------#
#---------------------------------------------------------------#
# Configuration properties used solely by the XMLUI SWORD #
# Client interface (used to submit DSpace content to another #
# SWORD server). #
#---------------------------------------------------------------#
# List of remote Sword servers. Used to build the drop-down list of selectable Sword targets.
targets = http://localhost:8080/sword/servicedocument, \
http://client.swordapp.org/client/servicedocument, \
http://dspace.swordapp.org/sword/servicedocument, \
http://sword.eprints.org/sword-app/servicedocument, \
http://sword.intralibrary.com/IntraLibrary-Deposit/service, \
http://fedora.swordapp.org/sword-fedora/servicedocument
# List of file types from which the user can select. If a type is not supported by the remote server
# it will not appear in the drop-down list.
file-types = application/zip
# List of package formats from which the user can select. If a format is not supported by the remote server
# it will not appear in the drop-down list.
#---------------------------------------------------------------#
#--------------SWORD V.1 CLIENT CONFIGURATIONS------------------#
#---------------------------------------------------------------#
# Configuration properties used solely by the XMLUI SWORD #
# Client interface (used to submit DSpace content to another #
# SWORD server). #
#---------------------------------------------------------------#
# List of remote Sword servers. Used to build the drop-down list of selectable Sword targets.
targets = http://localhost:8080/sword/servicedocument, \
http://client.swordapp.org/client/servicedocument, \
http://dspace.swordapp.org/sword/servicedocument, \
http://sword.eprints.org/sword-app/servicedocument, \
http://sword.intralibrary.com/IntraLibrary-Deposit/service, \
http://fedora.swordapp.org/sword-fedora/servicedocument
# List of file types from which the user can select. If a type is not supported by the remote server
# it will not appear in the drop-down list.
file-types = application/zip
# List of package formats from which the user can select. If a format is not supported by the remote server
# it will not appear in the drop-down list.
package-formats = http://purl.org/net/sword-types/METSDSpaceSIP

View File

@@ -1,41 +1,41 @@
#---------------------------------------------------------------#
#----------TRANSLATOR CURATION TASK CONFIGURATIONS--------------#
#---------------------------------------------------------------#
# Configuration properties used solely by MicrosoftTranslator #
# Curation Task (uses Microsoft Translation API v2) #
#---------------------------------------------------------------#
## Translation field settings
##
## Authoritative language field
## This will be read to determine the original language an item was submitted in
## Default: dc.language
translate.field.language = dc.language
## Metadata fields you wish to have translated
#
translate.field.targets = dc.description.abstract, dc.title, dc.type
## Translation language settings
##
## If the language field configured in translate.field.language is not present
## in the record, set translate.language.default to a default source language
## or leave blank to use autodetection
#
translate.language.default = en
## Target languages for translation
#
translate.language.targets = de, fr
## Translation API settings
##
## Your Bing API v2 key and/or Google "Simple API Access" Key
## (note to Google users: your v1 API key will not work with Translate v2,
## you will need to visit https://code.google.com/apis/console and activate
## a Simple API Access key)
##
## You do not need to enter a key for both services.
#
translate.api.key.microsoft = YOUR_MICROSOFT_API_KEY_GOES_HERE
#---------------------------------------------------------------#
#----------TRANSLATOR CURATION TASK CONFIGURATIONS--------------#
#---------------------------------------------------------------#
# Configuration properties used solely by MicrosoftTranslator #
# Curation Task (uses Microsoft Translation API v2) #
#---------------------------------------------------------------#
## Translation field settings
##
## Authoritative language field
## This will be read to determine the original language an item was submitted in
## Default: dc.language
translate.field.language = dc.language
## Metadata fields you wish to have translated
#
translate.field.targets = dc.description.abstract, dc.title, dc.type
## Translation language settings
##
## If the language field configured in translate.field.language is not present
## in the record, set translate.language.default to a default source language
## or leave blank to use autodetection
#
translate.language.default = en
## Target languages for translation
#
translate.language.targets = de, fr
## Translation API settings
##
## Your Bing API v2 key and/or Google "Simple API Access" Key
## (note to Google users: your v1 API key will not work with Translate v2,
## you will need to visit https://code.google.com/apis/console and activate
## a Simple API Access key)
##
## You do not need to enter a key for both services.
#
translate.api.key.microsoft = YOUR_MICROSOFT_API_KEY_GOES_HERE
translate.api.key.google = YOUR_GOOGLE_API_KEY_GOES_HERE

View File

@@ -1,153 +1,153 @@
--
-- database_schema_15-16.sql
--
-- Version: $$
--
-- Date: $Date: 2009-04-23 22:26:59 -0500 (Thu, 23 Apr 2009) $
--
-- Copyright (c) 2002-2009, The DSpace Foundation. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- - Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- - Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- - Neither the name of the DSpace Foundation nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-- BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-- OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-- TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-- DAMAGE.
--
-- SQL commands to upgrade the database schema of a live DSpace 1.5 or 1.5.x
-- to the DSpace 1.6 database schema
--
-- DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST.
-- DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST.
-- DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST.
------------------------------------------------------------------
-- New Column for Community Admin - Delegated Admin patch (DS-228)
------------------------------------------------------------------
ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id );
CREATE INDEX community_admin_fk_idx ON Community(admin);
-------------------------------------------------------------------------
-- DS-236 schema changes for Authority Control of Metadata Values
-------------------------------------------------------------------------
ALTER TABLE MetadataValue
ADD ( authority VARCHAR(100),
confidence INTEGER DEFAULT -1);
--------------------------------------------------------------------------
-- DS-295 CC License being assigned incorrect Mime Type during submission.
--------------------------------------------------------------------------
UPDATE bitstream SET bitstream_format_id =
(SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License')
WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons';
UPDATE bitstream SET bitstream_format_id =
(SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML')
WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons';
-------------------------------------------------------------------------
-- DS-260 Cleanup of Owning collection column for template item created
-- with the JSPUI after the collection creation
-------------------------------------------------------------------------
UPDATE item SET owning_collection = null WHERE item_id IN
(SELECT template_item_id FROM collection WHERE template_item_id IS NOT null);
------------------------------------------------------------------------------------------------------
-- You need to remove the constraints that are already in place in order to add the deferrable option.
-- Because the constraint names were generated by your Oracle instance, you need to discover them first.
-- Just copy and paste the commands printed by these three queries:
-- 1. community2collection
select 'ALTER TABLE '||c1.table_name||' DROP CONSTRAINT '||
c1.constraint_name||';' command from user_constraints c1, user_constraints c2
where c1.constraint_type = 'R' and c1.r_constraint_name = c2.constraint_name
and c1.table_name like 'COMMUNITY2COLLECTION'
and c2.table_name LIKE 'COLLECTION';
-- 2. community2community
select 'ALTER TABLE '||c1.table_name||' DROP CONSTRAINT '||
c1.constraint_name||';' command from user_constraints c1, user_constraints c2
where c1.constraint_type = 'R' and c1.r_constraint_name = c2.constraint_name
and c1.table_name like 'COMMUNITY2COMMUNITY'
and c2.table_name LIKE 'COMMUNITY';
-- 3. collection2item
select 'ALTER TABLE '||c1.table_name||' DROP CONSTRAINT '||
c1.constraint_name||';' command from user_constraints c1, user_constraints c2
where c1.constraint_type = 'R' and c1.r_constraint_name = c2.constraint_name
and c1.table_name like 'COLLECTION2ITEM'
and c2.table_name LIKE 'ITEM';
--
-- e.g.
-- ALTER TABLE community2collection DROP CONSTRAINT THECONSTRAINTNAMETHATYOUHAVEFINDWITHTHE1stQUERY;
-- ALTER TABLE community2community DROP CONSTRAINT THECONSTRAINTNAMETHATYOUHAVEFINDWITHTHE2ndQUERY;
-- ALTER TABLE collection2item DROP CONSTRAINT THECONSTRAINTNAMETHATYOUHAVEFINDWITHTHE3rdQUERY;
-- Now recreate them with a known name and the deferrable option!
select 'ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE;' from dual;
select 'ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE;' from dual;
select 'ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE;' from dual;
------------------------------------------------------------------
-- New tables /sequences for the harvester functionality (DS-289)
------------------------------------------------------------------
CREATE SEQUENCE harvested_collection_seq;
CREATE SEQUENCE harvested_item_seq;
-------------------------------------------------------
-- Create the harvest settings table
-------------------------------------------------------
-- Values used by the OAIHarvester to harvest a collection
-- HarvestInstance is the DAO class for this table
CREATE TABLE harvested_collection
(
collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE,
harvest_type INTEGER,
oai_source VARCHAR(256),
oai_set_id VARCHAR(256),
harvest_message VARCHAR2(512),
metadata_config_id VARCHAR(256),
harvest_status INTEGER,
harvest_start_time TIMESTAMP,
last_harvested TIMESTAMP,
id INTEGER PRIMARY KEY
);
CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id);
CREATE TABLE harvested_item
(
item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE,
last_harvested TIMESTAMP,
oai_id VARCHAR(64),
id INTEGER PRIMARY KEY
);
CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id);
--
-- database_schema_15-16.sql
--
-- Version: $$
--
-- Date: $Date: 2009-04-23 22:26:59 -0500 (Thu, 23 Apr 2009) $
--
-- Copyright (c) 2002-2009, The DSpace Foundation. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- - Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- - Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- - Neither the name of the DSpace Foundation nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-- BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-- OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-- TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-- DAMAGE.
--
-- SQL commands to upgrade the database schema of a live DSpace 1.5 or 1.5.x
-- to the DSpace 1.6 database schema
--
-- DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST.
-- DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST.
-- DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST. DUMP YOUR DATABASE FIRST.
------------------------------------------------------------------
-- New Column for Community Admin - Delegated Admin patch (DS-228)
------------------------------------------------------------------
ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id );
CREATE INDEX community_admin_fk_idx ON Community(admin);
-------------------------------------------------------------------------
-- DS-236 schema changes for Authority Control of Metadata Values
-------------------------------------------------------------------------
ALTER TABLE MetadataValue
ADD ( authority VARCHAR(100),
confidence INTEGER DEFAULT -1);
--------------------------------------------------------------------------
-- DS-295 CC License being assigned incorrect Mime Type during submission.
--------------------------------------------------------------------------
UPDATE bitstream SET bitstream_format_id =
(SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License')
WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons';
UPDATE bitstream SET bitstream_format_id =
(SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML')
WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons';
-------------------------------------------------------------------------
-- DS-260 Cleanup of Owning collection column for template item created
-- with the JSPUI after the collection creation
-------------------------------------------------------------------------
UPDATE item SET owning_collection = null WHERE item_id IN
(SELECT template_item_id FROM collection WHERE template_item_id IS NOT null);
------------------------------------------------------------------------------------------------------
-- You need to remove the constraints that are already in place in order to add the deferrable option.
-- Because the constraint names were generated by your Oracle instance, you need to discover them first.
-- Just copy and paste the commands printed by these three queries:
-- 1. community2collection
select 'ALTER TABLE '||c1.table_name||' DROP CONSTRAINT '||
c1.constraint_name||';' command from user_constraints c1, user_constraints c2
where c1.constraint_type = 'R' and c1.r_constraint_name = c2.constraint_name
and c1.table_name like 'COMMUNITY2COLLECTION'
and c2.table_name LIKE 'COLLECTION';
-- 2. community2community
select 'ALTER TABLE '||c1.table_name||' DROP CONSTRAINT '||
c1.constraint_name||';' command from user_constraints c1, user_constraints c2
where c1.constraint_type = 'R' and c1.r_constraint_name = c2.constraint_name
and c1.table_name like 'COMMUNITY2COMMUNITY'
and c2.table_name LIKE 'COMMUNITY';
-- 3. collection2item
select 'ALTER TABLE '||c1.table_name||' DROP CONSTRAINT '||
c1.constraint_name||';' command from user_constraints c1, user_constraints c2
where c1.constraint_type = 'R' and c1.r_constraint_name = c2.constraint_name
and c1.table_name like 'COLLECTION2ITEM'
and c2.table_name LIKE 'ITEM';
--
-- e.g.
-- ALTER TABLE community2collection DROP CONSTRAINT THECONSTRAINTNAMETHATYOUHAVEFINDWITHTHE1stQUERY;
-- ALTER TABLE community2community DROP CONSTRAINT THECONSTRAINTNAMETHATYOUHAVEFINDWITHTHE2ndQUERY;
-- ALTER TABLE collection2item DROP CONSTRAINT THECONSTRAINTNAMETHATYOUHAVEFINDWITHTHE3rdQUERY;
-- Now recreate them with a known name and the deferrable option!
select 'ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE;' from dual;
select 'ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE;' from dual;
select 'ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE;' from dual;
------------------------------------------------------------------
-- New tables /sequences for the harvester functionality (DS-289)
------------------------------------------------------------------
CREATE SEQUENCE harvested_collection_seq;
CREATE SEQUENCE harvested_item_seq;
-------------------------------------------------------
-- Create the harvest settings table
-------------------------------------------------------
-- Values used by the OAIHarvester to harvest a collection
-- HarvestInstance is the DAO class for this table
CREATE TABLE harvested_collection
(
collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE,
harvest_type INTEGER,
oai_source VARCHAR(256),
oai_set_id VARCHAR(256),
harvest_message VARCHAR2(512),
metadata_config_id VARCHAR(256),
harvest_status INTEGER,
harvest_start_time TIMESTAMP,
last_harvested TIMESTAMP,
id INTEGER PRIMARY KEY
);
CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id);
CREATE TABLE harvested_item
(
item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE,
last_harvested TIMESTAMP,
oai_id VARCHAR(64),
id INTEGER PRIMARY KEY
);
CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id);

File diff suppressed because it is too large Load Diff