Merge branch 'master' into DS-1814

# Conflicts:
#	dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java
This commit is contained in:
Luigi Andrea Pascarelli
2016-08-24 18:24:57 +02:00
36 changed files with 1091 additions and 431 deletions

View File

@@ -118,15 +118,17 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService
*/
@Override
public void delete(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException {
// FIXME: authorizations
// Remove ourself
resourcePolicyDAO.delete(context, resourcePolicy);
context.turnOffAuthorisationSystem();
if(resourcePolicy.getdSpaceObject() != null)
{
//A policy for a DSpace Object has been modified, fire a modify event on the DSpace object
contentServiceFactory.getDSpaceObjectService(resourcePolicy.getdSpaceObject()).updateLastModified(context, resourcePolicy.getdSpaceObject());
}
// FIXME: authorizations
// Remove ourself
resourcePolicyDAO.delete(context, resourcePolicy);
context.restoreAuthSystemState();
}
@@ -203,26 +205,34 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService
@Override
public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException {
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
resourcePolicyDAO.deleteByDso(c, o);
c.turnOffAuthorisationSystem();
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
c.restoreAuthSystemState();
}
@Override
public void removePolicies(Context c, DSpaceObject o, String type) throws SQLException, AuthorizeException {
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
resourcePolicyDAO.deleteByDsoAndType(c, o, type);
c.turnOffAuthorisationSystem();
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
c.restoreAuthSystemState();
}
@Override
public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group) throws SQLException, AuthorizeException {
contentServiceFactory.getDSpaceObjectService(dso).updateLastModified(context, dso);
resourcePolicyDAO.deleteByDsoGroupPolicies(context, dso, group);
context.turnOffAuthorisationSystem();
contentServiceFactory.getDSpaceObjectService(dso).updateLastModified(context, dso);
context.restoreAuthSystemState();
}
@Override
public void removeDsoEPersonPolicies(Context context, DSpaceObject dso, EPerson ePerson) throws SQLException, AuthorizeException {
contentServiceFactory.getDSpaceObjectService(dso).updateLastModified(context, dso);
resourcePolicyDAO.deleteByDsoEPersonPolicies(context, dso, ePerson);
context.turnOffAuthorisationSystem();
contentServiceFactory.getDSpaceObjectService(dso).updateLastModified(context, dso);
context.restoreAuthSystemState();
}
@@ -237,15 +247,19 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService
{
removeAllPolicies(c, o);
}else{
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
resourcePolicyDAO.deleteByDsoAndAction(c, o, actionId);
c.turnOffAuthorisationSystem();
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
c.restoreAuthSystemState();
}
}
@Override
public void removeDsoAndTypeNotEqualsToPolicies(Context c, DSpaceObject o, String type) throws SQLException, AuthorizeException {
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
resourcePolicyDAO.deleteByDsoAndTypeNotEqualsTo(c, o, type);
c.turnOffAuthorisationSystem();
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
c.restoreAuthSystemState();
}
@@ -279,10 +293,12 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService
}
//Update the last modified timestamp of all related DSpace Objects
context.turnOffAuthorisationSystem();
for (DSpaceObject dSpaceObject : relatedDSpaceObjects) {
//A policy for a DSpace Object has been modified, fire a modify event on the DSpace object
contentServiceFactory.getDSpaceObjectService(dSpaceObject).updateLastModified(context, dSpaceObject);
contentServiceFactory.getDSpaceObjectService(dSpaceObject).updateLastModified(context, dSpaceObject);
}
context.restoreAuthSystemState();
}
}
}

View File

@@ -62,6 +62,7 @@ public class InstallItemServiceImpl implements InstallItemService
AuthorizeException
{
Item item = is.getItem();
Collection collection = is.getCollection();
try {
if(suppliedHandle == null)
{
@@ -75,7 +76,15 @@ public class InstallItemServiceImpl implements InstallItemService
populateMetadata(c, item);
return finishItem(c, item, is);
// Finish up / archive the item
item = finishItem(c, item, is);
// As this is a BRAND NEW item, as a final step we need to remove the
// submitter item policies created during deposit and replace them with
// the default policies from the collection.
itemService.inheritCollectionDefaultPolicies(c, item, collection);
return item;
}
@Override
@@ -102,7 +111,7 @@ public class InstallItemServiceImpl implements InstallItemService
// If the item doesn't have a date.accessioned, set it to today
List<MetadataValue> dateAccessioned = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "date", "accessioned", Item.ANY);
if (dateAccessioned.size() == 0)
if (dateAccessioned.isEmpty())
{
itemService.addMetadata(c, item, MetadataSchema.DC_SCHEMA, "date", "accessioned", null, now.toString());
}
@@ -174,7 +183,7 @@ public class InstallItemServiceImpl implements InstallItemService
// If an issue date was passed in and it wasn't set to "today" (literal string)
// then note this previous issue date in provenance message
if (currentDateIssued.size() != 0)
if (!currentDateIssued.isEmpty())
{
String previousDateIssued = currentDateIssued.get(0).getValue();
if(previousDateIssued!=null && !previousDateIssued.equalsIgnoreCase("today"))
@@ -189,8 +198,18 @@ public class InstallItemServiceImpl implements InstallItemService
itemService.addMetadata(c, item, MetadataSchema.DC_SCHEMA, "description", "provenance", "en", provDescription);
}
// final housekeeping when adding new Item to archive
// common between installing and "restoring" items.
/**
* Final housekeeping when adding a new Item into the archive.
* This method is used by *both* installItem() and restoreItem(),
* so all actions here will be run for a newly added item or a restored item.
*
* @param c DSpace Context
* @param item Item in question
* @param is InProgressSubmission object
* @return final "archived" Item
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
protected Item finishItem(Context c, Item item, InProgressSubmission is)
throws SQLException, AuthorizeException
{
@@ -213,10 +232,6 @@ public class InstallItemServiceImpl implements InstallItemService
// remove in-progress submission
contentServiceFactory.getInProgressSubmissionService(is).deleteWrapper(c, is);
// remove the item's policies and replace them with
// the defaults from the collection
itemService.inheritCollectionDefaultPolicies(c, item, is.getCollection());
// set embargo lift date and take away read access if indicated.
embargoService.setEmbargo(c, item);

View File

@@ -47,6 +47,7 @@ import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.service.IdentifierService;
import org.dspace.services.ConfigurationService;
import org.dspace.versioning.service.VersioningService;
import org.dspace.workflow.WorkflowItemService;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -93,6 +94,12 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required=true)
protected ConfigurationService configurationService;
@Autowired(required=true)
protected WorkspaceItemService workspaceItemService;
@Autowired(required=true)
protected WorkflowItemService workflowItemService;
protected ItemServiceImpl()
{
super();
@@ -893,28 +900,37 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// is this collection not yet created, and an item template is created
if (item.getOwningCollection() == null)
{
return true;
if (!isInProgressSubmission(context, item)) {
return true;
}
else {
return false;
}
}
return collectionService.canEditBoolean(context, item.getOwningCollection(), false);
}
@Override
public boolean canCreateNewVersion(Context context, Item item) throws SQLException{
if (authorizeService.isAdmin(context, item))
{
return true;
}
if (context.getCurrentUser() != null
&& context.getCurrentUser().equals(item.getSubmitter()))
{
return configurationService.getPropertyAsType(
"versioning.submitterCanCreateNewVersion", false);
}
return false;
/**
* Check if the item is an in-progress submission
* @param context DSpace context object
* @param item the item to check
* @return <code>true</code> if the item is an inprogress submission, i.e. a WorkspaceItem or WorkflowItem
* @throws SQLException if a database error occurs
*/
public boolean isInProgressSubmission(Context context, Item item) throws SQLException {
return workspaceItemService.findByItem(context, item) != null
|| workflowItemService.findByItem(context, item) != null;
}
/*
With every finished submission, a number of resource policy entries which have a null value for the dspace_object column are generated in the database.
Prevent the generation of resource policy entries with a null dspace_object value.
*/
@Override
/**
* Add the default policies, which have not been already added to the given DSpace object
@@ -1213,4 +1229,20 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// return count of items that are not in archive and withdrawn
return itemDAO.countItems(context, false, true);
}
public boolean canCreateNewVersion(Context context, Item item) throws SQLException{
if (authorizeService.isAdmin(context, item))
{
return true;
}
if (context.getCurrentUser() != null
&& context.getCurrentUser().equals(item.getSubmitter()))
{
return configurationService.getPropertyAsType(
"versioning.submitterCanCreateNewVersion", false);
}
return false;
}
}

View File

@@ -17,7 +17,6 @@ import java.util.Map;
import java.util.Date;
import java.text.SimpleDateFormat;
import java.util.logging.Level;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
@@ -448,238 +447,167 @@ public class METSRightsCrosswalk
public void ingest(Context context, DSpaceObject dso, List<Element> ml, boolean createMissingMetadataFields)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
// we cannot crosswalk METSRights to a SITE object
// SITE objects are not supported by the METSRightsCrosswalk
if (dso.getType() == Constants.SITE)
{
throw new CrosswalkObjectNotSupported("Wrong target object type, METSRightsCrosswalk cannot crosswalk a SITE object.");
}
//First, clear all existing Policies on this DSpace Object
// as we don't want them to conflict with policies we will be adding
if(!ml.isEmpty())
// If we're fed the top-level <RightsDeclarationMD> wrapper element, recurse into its guts.
// What we need to analyze are the <Context> elements underneath it.
if(!ml.isEmpty() && ml.get(0).getName().equals("RightsDeclarationMD"))
{
authorizeService.removeAllPolicies(context, dso);
ingest(context, dso, ml.get(0).getChildren(), createMissingMetadataFields);
}
// Loop through each Element in the List
List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
for (Element element : ml)
else
{
// if we're fed a <RightsDeclarationMD> wrapper object, recurse on its guts:
if (element.getName().equals("RightsDeclarationMD"))
// Loop through each <Context> Element in the passed in List, creating a ResourcePolicy for each
List<ResourcePolicy> policies = new ArrayList<>();
for (Element element : ml)
{
ingest(context, dso, element.getChildren(), createMissingMetadataFields);
}
// "Context" section (where permissions are stored)
else if (element.getName().equals("Context"))
{
//get what class of context this is
String contextClass = element.getAttributeValue("CONTEXTCLASS");
ResourcePolicy rp = resourcePolicyService.create(context);
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
// get reference to the <Permissions> element
// Note: we are assuming here that there will only ever be ONE <Permissions>
// element. Currently there are no known use cases for multiple.
Element permsElement = element.getChild("Permissions", METSRights_NS);
if(permsElement == null) {
log.error("No <Permissions> element was found. Skipping this <Context> element.");
continue;
}
if (element.getAttributeValue("rpName") != null)
// Must be a "Context" section (where permissions are stored)
if (element.getName().equals("Context"))
{
rp.setRpName(element.getAttributeValue("rpName"));
}
try {
if (element.getAttributeValue("start-date") != null)
{
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
}
if (element.getAttributeValue("end-date") != null)
{
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
}
}catch (ParseException ex) {
log.error("Failed to parse embargo date. The date needs to be in the format 'yyyy-MM-dd'.", ex);
}
//Check if this permission pertains to Anonymous users
if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
{
//get DSpace Anonymous group, ID=0
Group anonGroup = groupService.findByName(context, Group.ANONYMOUS);
if(anonGroup==null)
{
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
//get what class of context this is
String contextClass = element.getAttributeValue("CONTEXTCLASS");
ResourcePolicy rp = resourcePolicyService.create(context);
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
// get reference to the <Permissions> element
// Note: we are assuming here that there will only ever be ONE <Permissions>
// element. Currently there are no known use cases for multiple.
Element permsElement = element.getChild("Permissions", METSRights_NS);
if(permsElement == null) {
log.error("No <Permissions> element was found. Skipping this <Context> element.");
continue;
}
rp.setGroup(anonGroup);
} // else if this permission declaration pertains to Administrators
else if(ADMIN_CONTEXTCLASS.equals(contextClass))
{
//get DSpace Administrator group, ID=1
Group adminGroup = groupService.findByName(context, Group.ADMIN);
if(adminGroup==null)
if (element.getAttributeValue("rpName") != null)
{
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
rp.setRpName(element.getAttributeValue("rpName"));
}
rp.setGroup(adminGroup);
} // else if this permission pertains to another DSpace group
else if(GROUP_CONTEXTCLASS.equals(contextClass))
{
try
{
//we need to find the name of DSpace group it pertains to
//Get the text within the <UserName> child element,
// this is the group's name
String groupName = element.getChildTextTrim("UserName", METSRights_NS);
//Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN)
// from its external format (e.g. COLLECTION_<handle>_ADMIN)
groupName = PackageUtils.translateGroupNameForImport(context, groupName);
//Check if this group exists in DSpace already
Group group = groupService.findByName(context, groupName);
//if not found, throw an error -- user should restore group from the SITE AIP
if(group==null)
try {
if (element.getAttributeValue("start-date") != null)
{
throw new CrosswalkInternalException("Cannot restore Group permissions on object ("
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
}
if (element.getAttributeValue("end-date") != null)
{
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
}
}catch (ParseException ex) {
log.error("Failed to parse embargo date. The date needs to be in the format 'yyyy-MM-dd'.", ex);
}
//Check if this permission pertains to Anonymous users
if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
{
//get DSpace Anonymous group, ID=0
Group anonGroup = groupService.findByName(context, Group.ANONYMOUS);
if(anonGroup==null)
{
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
}
rp.setGroup(anonGroup);
} // else if this permission declaration pertains to Administrators
else if(ADMIN_CONTEXTCLASS.equals(contextClass))
{
//get DSpace Administrator group, ID=1
Group adminGroup = groupService.findByName(context, Group.ADMIN);
if(adminGroup==null)
{
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
}
rp.setGroup(adminGroup);
} // else if this permission pertains to another DSpace group
else if(GROUP_CONTEXTCLASS.equals(contextClass))
{
try
{
//we need to find the name of DSpace group it pertains to
//Get the text within the <UserName> child element,
// this is the group's name
String groupName = element.getChildTextTrim("UserName", METSRights_NS);
//Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN)
// from its external format (e.g. COLLECTION_<handle>_ADMIN)
groupName = PackageUtils.translateGroupNameForImport(context, groupName);
//Check if this group exists in DSpace already
Group group = groupService.findByName(context, groupName);
//if not found, throw an error -- user should restore group from the SITE AIP
if(group==null)
{
throw new CrosswalkInternalException("Cannot restore Group permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ "). The Group named '" + groupName + "' is missing from DSpace. "
+ "Please restore this group using the SITE AIP, or recreate it.");
}
//assign group to policy
rp.setGroup(group);
}
catch(PackageException pe)
{
//A PackageException will only be thrown if translateDefaultGroupName() fails
//We'll just wrap it as a CrosswalkException and throw it upwards
throw new CrosswalkException(pe);
}
}// else if this permission pertains to a DSpace person
else if(PERSON_CONTEXTCLASS.equals(contextClass))
{
//we need to find the person it pertains to
// Get the text within the <UserName> child element,
// this is the person's email address
String personEmail = element.getChildTextTrim("UserName", METSRights_NS);
//Check if this person exists in DSpace already
EPerson person = ePersonService.findByEmail(context, personEmail);
//If cannot find by email, try by netID
//(though METSRights should contain email if it was exported by DSpace)
if(person==null)
{
person = ePersonService.findByNetid(context, personEmail);
}
//if not found, throw an error -- user should restore person from the SITE AIP
if(person==null)
{
throw new CrosswalkInternalException("Cannot restore Person permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ "). The Group named '" + groupName + "' is missing from DSpace. "
+ "Please restore this group using the SITE AIP, or recreate it.");
+ "). The Person with email/netid '" + personEmail + "' is missing from DSpace. "
+ "Please restore this Person object using the SITE AIP, or recreate it.");
}
//assign group to policy
rp.setGroup(group);
}
catch(PackageException pe)
{
//A PackageException will only be thrown if translateDefaultGroupName() fails
//We'll just wrap it as a CrosswalkException and throw it upwards
throw new CrosswalkException(pe);
}
}// else if this permission pertains to a DSpace person
else if(PERSON_CONTEXTCLASS.equals(contextClass))
{
//we need to find the person it pertains to
// Get the text within the <UserName> child element,
// this is the person's email address
String personEmail = element.getChildTextTrim("UserName", METSRights_NS);
//Check if this person exists in DSpace already
EPerson person = ePersonService.findByEmail(context, personEmail);
//If cannot find by email, try by netID
//(though METSRights should contain email if it was exported by DSpace)
if(person==null)
{
person = ePersonService.findByNetid(context, personEmail);
//assign person to the policy
rp.setEPerson(person);
}//end if Person
else {
log.error("Unrecognized CONTEXTCLASS: " + contextClass);
}
//if not found, throw an error -- user should restore person from the SITE AIP
if(person==null)
{
throw new CrosswalkInternalException("Cannot restore Person permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ "). The Person with email/netid '" + personEmail + "' is missing from DSpace. "
+ "Please restore this Person object using the SITE AIP, or recreate it.");
}
//set permissions on policy add to list of policies
rp.setAction(parsePermissions(permsElement));
policies.add(rp);
} //end if "Context" element
}//end for loop
//assign person to the policy
rp.setEPerson(person);
}//end if Person
else {
log.error("Unrecognized CONTEXTCLASS: " + contextClass);
}
//set permissions on policy and add to object
rp.setAction(parsePermissions(permsElement));
policies.add(rp);
assignPermissions(context, dso, policies);
} //end if "Context" element
}//end for loop
}
/**
* Removes all existing policies from the specified DSpace Object and
* replaces them with the supplied list of ResourcePolicies. If the
* supplied list is empty, the object ends up with no policies attached.
*
* @param context DSpace context object
* @param dso The DSpace Object
* @param policies the ResourcePolicies to assign to the object
*/
private void assignPermissions(Context context, DSpaceObject dso, List<ResourcePolicy> policies)
throws SQLException, AuthorizeException
{
authorizeService.removeAllPolicies(context, dso);
if (policies == null){
throw new AuthorizeException("Policies are null");
}
else{
// Finally, we need to remove any existing policies from the current object,
// and replace them with the policies provided via METSRights. NOTE:
// if the list of policies provided by METSRights is an empty list, then
// the final object will have no policies attached.
authorizeService.removeAllPolicies(context, dso);
authorizeService.addPolicies(context, policies, dso);
}
}
private void assignPermissions(Context context, DSpaceObject dso, Group group, Element permsElement)
throws SQLException, AuthorizeException
{
//first, parse our permissions to determine which action we are allowing in DSpace
int actionID = parsePermissions(permsElement);
//If action ID is less than base READ permissions (value=0),
// then something must've gone wrong in the parsing
if(actionID < Constants.READ)
{
log.warn("Unable to properly restore all access permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ ") for group '" + group.getName() + "'.");
}
//Otherwise, add the appropriate group policy for this object
authorizeService.addPolicy(context, dso, actionID, group);
}
/**
* Parses the 'permsElement' (corresponding to a <code>Permissions</code>
* element), and assigns those permissions to the specified EPerson
* on the specified DSpace Object.
*
* @param context DSpace context object
* @param dso The DSpace Object
* @param person The DSpace EPerson
* @param permsElement The METSRights <code>Permissions</code> element
*/
private void assignPermissions(Context context, DSpaceObject dso, EPerson person, Element permsElement)
throws SQLException, AuthorizeException
{
//first, parse our permissions to determine which action we are allowing in DSpace
int actionID = parsePermissions(permsElement);
//If action ID is less than base READ permissions (value=0),
// then something must've gone wrong in the parsing
if(actionID < Constants.READ)
{
log.warn("Unable to properly restore all access permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ ") for person '" + person.getEmail() + "'.");
}
//Otherwise, add the appropriate EPerson policy for this object
authorizeService.addPolicy(context, dso, actionID, person);
} // end else
}
/**

View File

@@ -333,18 +333,18 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
}
else
{
ZipFile zip = new ZipFile(pkgFile);
try(ZipFile zip = new ZipFile(pkgFile))
{
// Retrieve the manifest file entry (named mets.xml)
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
// Retrieve the manifest file entry (named mets.xml)
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
// parse the manifest and sanity-check it.
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
validate, getConfigurationName());
// close the Zip file for now
// (we'll extract the other files from zip when we need them)
zip.close();
if(manifestEntry!=null)
{
// parse the manifest and sanity-check it.
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
validate, getConfigurationName());
}
}
}
// return our parsed out METS manifest

View File

@@ -563,4 +563,12 @@ public interface ItemService extends DSpaceObjectService<Item>, DSpaceObjectLega
* @throws SQLException if database error
*/
int countWithdrawnItems(Context context) throws SQLException;
/**
* Check if the supplied item is an in-progress submission
* @param context DSpace context object
* @param item the item to check
* @return <code>true</code> if the item is linked to a workspaceitem or workflowitem
* @throws SQLException if a database error occurs
*/
boolean isInProgressSubmission(Context context, Item item) throws SQLException;
}

View File

@@ -26,8 +26,8 @@ public class Handle implements ReloadableEntity<Integer> {
@Id
@Column(name="handle_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE ,generator="handle_seq")
@SequenceGenerator(name="handle_seq", sequenceName="handle_seq", allocationSize = 1)
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator="handle_id_seq")
@SequenceGenerator(name="handle_id_seq", sequenceName="handle_id_seq", allocationSize = 1)
private Integer id;
@Column(name = "handle", unique = true)
@@ -57,6 +57,7 @@ public class Handle implements ReloadableEntity<Integer> {
}
@Override
public Integer getID() {
return id;
}
@@ -91,6 +92,7 @@ public class Handle implements ReloadableEntity<Integer> {
return resourceTypeId;
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
@@ -105,6 +107,7 @@ public class Handle implements ReloadableEntity<Integer> {
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(id)

View File

@@ -8,20 +8,22 @@
package org.dspace.handle;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.service.SiteService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.dao.HandleDAO;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
* Interface to the <a href="http://www.handle.net" target=_new>CNRI Handle
* System </a>.
@@ -46,6 +48,8 @@ public class HandleServiceImpl implements HandleService
@Autowired(required = true)
protected HandleDAO handleDAO;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired
protected SiteService siteService;
@@ -66,7 +70,7 @@ public class HandleServiceImpl implements HandleService
return null;
}
String url = ConfigurationManager.getProperty("dspace.url")
String url = configurationService.getProperty("dspace.url")
+ "/handle/" + handle;
if (log.isDebugEnabled())
@@ -81,9 +85,9 @@ public class HandleServiceImpl implements HandleService
public String resolveUrlToHandle(Context context, String url)
throws SQLException
{
String dspaceUrl = ConfigurationManager.getProperty("dspace.url")
String dspaceUrl = configurationService.getProperty("dspace.url")
+ "/handle/";
String handleResolver = ConfigurationManager.getProperty("handle.canonical.prefix");
String handleResolver = configurationService.getProperty("handle.canonical.prefix");
String handle = null;
@@ -119,8 +123,8 @@ public class HandleServiceImpl implements HandleService
// Let the admin define a new prefix, if not then we'll use the
// CNRI default. This allows the admin to use "hdl:" if they want to or
// use a locally branded prefix handle.myuni.edu.
String handlePrefix = ConfigurationManager.getProperty("handle.canonical.prefix");
if (handlePrefix == null || handlePrefix.length() == 0)
String handlePrefix = configurationService.getProperty("handle.canonical.prefix");
if (StringUtils.isBlank(handlePrefix))
{
handlePrefix = "http://hdl.handle.net/";
}
@@ -133,7 +137,7 @@ public class HandleServiceImpl implements HandleService
throws SQLException
{
Handle handle = handleDAO.create(context, new Handle());
String handleId = createId(handle.getID());
String handleId = createId(context);
handle.setHandle(handleId);
handle.setDSpaceObject(dso);
@@ -302,8 +306,8 @@ public class HandleServiceImpl implements HandleService
@Override
public String getPrefix()
{
String prefix = ConfigurationManager.getProperty("handle.prefix");
if (null == prefix)
String prefix = configurationService.getProperty("handle.prefix");
if (StringUtils.isBlank(prefix))
{
prefix = EXAMPLE_PREFIX; // XXX no good way to exit cleanly
log.error("handle.prefix is not configured; using " + prefix);
@@ -386,18 +390,22 @@ public class HandleServiceImpl implements HandleService
}
/**
* Create a new handle id. The implementation uses the PK of the RDBMS
* Handle table.
* Create/mint a new handle id.
*
* @param context DSpace Context
* @return A new handle id
* @exception SQLException
* If a database error occurs
*/
protected String createId(int id) throws SQLException
protected String createId(Context context) throws SQLException
{
// Get configured prefix
String handlePrefix = getPrefix();
return handlePrefix + (handlePrefix.endsWith("/") ? "" : "/") + id;
// Get next available suffix (as a Long, since DSpace uses an incrementing sequence)
Long handleSuffix = handleDAO.getNextHandleSuffix(context);
return handlePrefix + (handlePrefix.endsWith("/") ? "" : "/") + handleSuffix.toString();
}
@Override

View File

@@ -24,6 +24,8 @@ import java.util.List;
*/
public interface HandleDAO extends GenericDAO<Handle> {
public Long getNextHandleSuffix(Context context) throws SQLException;
public List<Handle> getHandlesByDSpaceObject(Context context, DSpaceObject dso) throws SQLException;
public Handle findByHandle(Context context, String handle)throws SQLException;

View File

@@ -7,6 +7,9 @@
*/
package org.dspace.handle.dao.impl;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.core.AbstractHibernateDAO;
@@ -15,6 +18,10 @@ import org.dspace.handle.dao.HandleDAO;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.criterion.Restrictions;
import org.hibernate.dialect.Dialect;
import org.hibernate.jdbc.ReturningWork;
import org.hibernate.service.jdbc.dialect.internal.StandardDialectResolver;
import org.hibernate.service.jdbc.dialect.spi.DialectResolver;
import java.sql.SQLException;
import java.util.Collections;
@@ -29,6 +36,9 @@ import java.util.List;
*/
public class HandleDAOImpl extends AbstractHibernateDAO<Handle> implements HandleDAO
{
// The name of the sequence used to determine next available handle
private static final String HANDLE_SEQUENCE = "handle_seq";
protected HandleDAOImpl()
{
super();
@@ -94,4 +104,45 @@ public class HandleDAOImpl extends AbstractHibernateDAO<Handle> implements Handl
public int countRows(Context context) throws SQLException {
return count(createQuery(context, "SELECT count(*) FROM Handle"));
}
/**
 * Obtain the next available Handle suffix from the database-managed
 * {@code handle_seq} sequence.
 *
 * @param context Current DSpace Context
 * @return next available Handle suffix (as a Long); 0 if the sequence query
 *         unexpectedly returns no row
 * @throws SQLException if a database error occurs or the sequence doesn't exist
 */
@Override
public Long getNextHandleSuffix(Context context) throws SQLException
{
    // Ask Hibernate for the underlying JDBC connection and run a small
    // unit of work against it that reads the next sequence value.
    return getHibernateSession(context).doReturningWork(new ReturningWork<Long>() {
        @Override
        public Long execute(Connection connection) throws SQLException {
            // Resolve the SQL dialect for this connection, as the
            // "next sequence value" SQL differs per database vendor.
            Dialect dialect = new StandardDialectResolver().resolveDialect(connection.getMetaData());
            String nextValSql = dialect.getSequenceNextValString(HANDLE_SEQUENCE);

            // Run the dialect-specific query; the first column of the
            // first row holds the next value of the sequence.
            try (PreparedStatement statement = connection.prepareStatement(nextValSql);
                 ResultSet results = statement.executeQuery())
            {
                return results.next() ? results.getLong(1) : 0L;
            }
        }
    });
}
}

View File

@@ -65,14 +65,16 @@ implements DOIConnector
// Configuration property names
static final String CFG_USER = "identifier.doi.user";
static final String CFG_PASSWORD = "identifier.doi.password";
private static final String CFG_PREFIX
static final String CFG_PREFIX
= "identifier.doi.prefix";
private static final String CFG_PUBLISHER
static final String CFG_PUBLISHER
= "crosswalk.dissemination.DataCite.publisher";
private static final String CFG_DATAMANAGER
static final String CFG_DATAMANAGER
= "crosswalk.dissemination.DataCite.dataManager";
private static final String CFG_HOSTINGINSTITUTION
static final String CFG_HOSTINGINSTITUTION
= "crosswalk.dissemination.DataCite.hostingInstitution";
static final String CFG_NAMESPACE
= "crosswalk.dissemination.DataCite.namespace";
/**
* Stores the scheme used to connect to the DataCite server. It will be set
@@ -931,7 +933,9 @@ implements DOIConnector
{
return root;
}
Element identifier = new Element("identifier", "http://datacite.org/schema/kernel-3");
Element identifier = new Element("identifier",
configurationService.getProperty(CFG_NAMESPACE,
"http://datacite.org/schema/kernel-3"));
identifier.setAttribute("identifierType", "DOI");
identifier.addContent(doi.substring(DOI.SCHEME.length()));
return root.addContent(0, identifier);

View File

@@ -15,6 +15,8 @@ import org.dspace.administer.RegistryLoader;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.dspace.xmlworkflow.service.XmlWorkflowService;
import org.flywaydb.core.api.MigrationInfo;
import org.flywaydb.core.api.callback.FlywayCallback;
import org.slf4j.Logger;
@@ -74,8 +76,7 @@ public class DatabaseRegistryUpdater implements FlywayCallback
MetadataImporter.loadRegistry(base + "sword-metadata.xml", true);
// Check if XML Workflow is enabled in workflow.cfg
String framework = config.getProperty("workflow.framework");
if (framework!=null && framework.equals("xmlworkflow"))
if (WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService)
{
// If so, load in the workflow metadata types as well
MetadataImporter.loadRegistry(base + "workflow-types.xml", true);

View File

@@ -191,13 +191,29 @@ public class DatabaseUtils
{
// Otherwise, we assume "argv[1]" is a valid migration version number
// This is only for testing! Never specify for Production!
System.out.println("Migrating database ONLY to version " + argv[1] + " ... (Check logs for details)");
String migrationVersion = argv[1];
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.println("You've specified to migrate your database ONLY to version " + migrationVersion + " ...");
System.out.println("\nWARNING: It is highly likely you will see errors in your logs when the Metadata");
System.out.println("or Bitstream Format Registry auto-update. This is because you are attempting to");
System.out.println("use an OLD version " + argv[1] + " Database with a newer DSpace API. NEVER do this in a");
System.out.println("use an OLD version " + migrationVersion + " Database with a newer DSpace API. NEVER do this in a");
System.out.println("PRODUCTION scenario. The resulting old DB is only useful for migration testing.\n");
// Update the database, to the version specified.
updateDatabase(dataSource, connection, argv[1], true);
System.out.print("Are you SURE you only want to migrate your database to version " + migrationVersion + "? [y/n]: ");
String choiceString = input.readLine();
input.close();
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("Migrating database ONLY to version " + migrationVersion + " ... (Check logs for details)");
// Update the database, to the version specified.
updateDatabase(dataSource, connection, migrationVersion, false);
}
else
{
System.out.println("No action performed.");
}
}
}
else
@@ -307,6 +323,10 @@ public class DatabaseUtils
System.out.println("Done.");
System.exit(0);
}
else
{
System.out.println("No action performed.");
}
}
catch(SQLException e)
{
@@ -550,8 +570,8 @@ public class DatabaseUtils
protected static synchronized void updateDatabase(DataSource datasource, Connection connection)
throws SQLException
{
// By default, upgrade to the *latest* version and run migrations out-of-order
updateDatabase(datasource, connection, null, true);
// By default, upgrade to the *latest* version and never run migrations out-of-order
updateDatabase(datasource, connection, null, false);
}
/**
@@ -1373,13 +1393,28 @@ public class DatabaseUtils
flywaydb = null;
}
/**
* Returns the current Flyway schema_version being used by the given database.
* (i.e. the version of the highest numbered migration that this database has run)
* @param connection current DB Connection
* @return version as string
* @throws SQLException if database error occurs
*/
public static String getCurrentFlywayState(Connection connection) throws SQLException {
PreparedStatement statement = connection.prepareStatement("SELECT \"version\" FROM \"schema_version\" ORDER BY \"installed_rank\" desc");
PreparedStatement statement = connection.prepareStatement("SELECT \"version\" FROM \"schema_version\" ORDER BY \"version\" desc");
ResultSet resultSet = statement.executeQuery();
resultSet.next();
return resultSet.getString("version");
}
/**
* Return the DSpace version that this Flyway-enabled database reports to be compatible with.
* The version is retrieved from Flyway, and parsed into a Double to represent an actual
* DSpace version number (e.g. 5.0, 6.0, etc)
* @param connection current DB Connection
* @return reported DSpace version as a Double
* @throws SQLException if database error occurs
*/
public static Double getCurrentFlywayDSpaceState(Connection connection) throws SQLException
{
String flywayState = getCurrentFlywayState(connection);

View File

@@ -7,7 +7,6 @@
*/
package org.dspace.storage.rdbms.xmlworkflow;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.storage.rdbms.DatabaseUtils;
import org.dspace.workflow.factory.WorkflowServiceFactory;
@@ -19,6 +18,18 @@ import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource;
import java.sql.Connection;
/**
* This class automatically migrates your DSpace Database to use the
* XML-based Configurable Workflow system whenever it is enabled.
* <P>
* Because XML-based Configurable Workflow existed prior to our migration, this
* class first checks for the existence of the "cwf_workflowitem" table before
* running any migrations.
* <P>
* This class represents a Flyway DB Java Migration
* http://flywaydb.org/documentation/migration/java.html
* <P>
* It can upgrade a 6.0 version of DSpace to use the XMLWorkflow.
*
* User: kevin (kevin at atmire.com)
* Date: 1/09/15
* Time: 11:34

View File

@@ -0,0 +1,30 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
------------------------------------------------------
-- DS-2775 Drop unused sequences
------------------------------------------------------
-- NOTE(review): these per-table sequences are no longer consulted for
-- primary-key generation (presumably superseded by Hibernate-managed
-- identifiers in DSpace 6 -- confirm against the DS-2775 ticket before
-- reusing any of them).
DROP SEQUENCE bitstream_seq;
DROP SEQUENCE bundle2bitstream_seq;
DROP SEQUENCE bundle_seq;
DROP SEQUENCE collection2item_seq;
DROP SEQUENCE collection_seq;
DROP SEQUENCE community2collection_seq;
DROP SEQUENCE community2community_seq;
DROP SEQUENCE community_seq;
DROP SEQUENCE dcvalue_seq;
DROP SEQUENCE eperson_seq;
DROP SEQUENCE epersongroup2eperson_seq;
DROP SEQUENCE epersongroup2workspaceitem_seq;
DROP SEQUENCE epersongroup_seq;
DROP SEQUENCE group2group_seq;
DROP SEQUENCE group2groupcache_seq;
DROP SEQUENCE historystate_seq;
DROP SEQUENCE item2bundle_seq;
DROP SEQUENCE item_seq;

View File

@@ -0,0 +1,15 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
----------------------------------------------------------------------------------
-- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles
-- can be minted from 'handle_seq'
----------------------------------------------------------------------------------
-- Create a new sequence for 'handle_id' column.
-- The role of this sequence is to simply provide a unique internal ID to the database.
CREATE SEQUENCE handle_id_seq;

View File

@@ -0,0 +1,44 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
----------------------------------------------------------------------------------
-- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles
-- can be minted from 'handle_seq'
----------------------------------------------------------------------------------
-- Create a new sequence for 'handle_id' column.
-- The role of this sequence is to simply provide a unique internal ID to the database.
CREATE SEQUENCE handle_id_seq;
-- Initialize new 'handle_id_seq' to the maximum value of 'handle_id'
DECLARE
curr NUMBER := 0;
BEGIN
SELECT max(handle_id) INTO curr FROM handle;
curr := curr + 1;
EXECUTE IMMEDIATE 'DROP SEQUENCE handle_id_seq';
EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_id_seq START WITH ' || NVL(curr,1);
END;
/
-- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column,
-- as this sequence is used to mint new Handles.
-- Code borrowed from update-sequences.sql and updateseq.sql
DECLARE
curr NUMBER := 0;
BEGIN
SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999')) INTO curr FROM handle WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$');
curr := curr + 1;
EXECUTE IMMEDIATE 'DROP SEQUENCE handle_seq';
EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_seq START WITH ' || NVL(curr,1);
END;
/

View File

@@ -0,0 +1,30 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
----------------------------------------------------------------------------------
-- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles
-- can be minted from 'handle_seq'
----------------------------------------------------------------------------------
-- Create a new sequence for 'handle_id' column.
-- The role of this sequence is to simply provide a unique internal ID to the database.
CREATE SEQUENCE handle_id_seq;
-- Initialize new 'handle_id_seq' to the maximum value of 'handle_id'
SELECT setval('handle_id_seq', max(handle_id)) FROM handle;
-- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column,
-- as this sequence is used to mint new Handles.
-- Code borrowed from update-sequences.sql
SELECT setval('handle_seq',
CAST (
max(
to_number(regexp_replace(handle, '.*/', ''), '999999999999')
)
AS BIGINT)
)
FROM handle
WHERE handle SIMILAR TO '%/[0123456789]*';

View File

@@ -17,7 +17,7 @@
--
-- This script is called automatically by the following
-- Flyway Java migration class:
-- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration
-- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration
----------------------------------------------------
-- Convert workflow groups:

View File

@@ -7,7 +7,7 @@
--
----------------------------------------------------
-- Database Schema Update for XML/Configurable Workflow
-- Database Schema Update for XML/Configurable Workflow (for DSpace 6.0)
--
-- This file will automatically create/update your
-- DSpace Database tables to support XML/Configurable workflows.
@@ -17,7 +17,7 @@
--
-- This script is called automatically by the following
-- Flyway Java migration class:
-- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration
-- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration
----------------------------------------------------
CREATE SEQUENCE cwf_workflowitem_seq;

View File

@@ -17,7 +17,7 @@
--
-- This script is called automatically by the following
-- Flyway Java migration class:
-- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration
-- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration
----------------------------------------------------
-- Convert workflow groups:

View File

@@ -17,7 +17,7 @@
--
-- This script is called automatically by the following
-- Flyway Java migration class:
-- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration
-- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration
----------------------------------------------------
CREATE SEQUENCE cwf_workflowitem_seq;

View File

@@ -1518,7 +1518,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
}
/**
* Test of canEditBoolean method, of class Collection.
* Test of canEdit method, of class Item.
*/
@Test
public void testCanEditBooleanAuth() throws Exception
@@ -1543,7 +1543,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
}
/**
* Test of canEditBoolean method, of class Collection.
* Test of canEdit method, of class Item.
*/
@Test
public void testCanEditBooleanAuth2() throws Exception
@@ -1568,7 +1568,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
}
/**
* Test of canEditBoolean method, of class Collection.
* Test of canEdit method, of class Item.
*/
@Test
public void testCanEditBooleanAuth3() throws Exception
@@ -1595,7 +1595,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
}
/**
* Test of canEditBoolean method, of class Collection.
* Test of canEdit method, of class Item.
*/
@Test
public void testCanEditBooleanAuth4() throws Exception
@@ -1617,11 +1617,33 @@ public class ItemTest extends AbstractDSpaceObjectTest
}};
// Ensure person with WRITE perms on the Collection can edit item
assertTrue("testCanEditBooleanAuth43 0", itemService.canEdit(context, it));
assertTrue("testCanEditBooleanAuth4 0", itemService.canEdit(context, it));
}
/**
 * Test of canEdit method, of class Item: a Collection's template item is
 * editable by a caller who holds WRITE on the Collection, even when WRITE
 * on the Item itself is denied (permission inheritance).
 */
@Test
public void testCanEditBooleanAuth5() throws Exception
{
    // Test Inheritance of permissions
    new NonStrictExpectations(authorizeService.getClass())
    {{
        // Disallow Item WRITE perms
        authorizeService.authorizeAction((Context) any, (Item) any,
                Constants.WRITE); result = new AuthorizeException();
        // Allow Collection WRITE perms
        authorizeService.authorizeAction((Context) any, (Collection) any,
                Constants.WRITE,anyBoolean); result = null;
    }};

    // Give the test collection a template item to exercise canEdit() against
    collectionService.createTemplateItem(context, collection);
    collectionService.update(context, collection);

    // Assertion label fixed: it previously read "testCanEditBooleanNoAuth5",
    // which did not match this (positive-authorization) test's name.
    assertTrue("testCanEditBooleanAuth5 0", itemService.canEdit(context, collection.getTemplateItem()));
}
/**
* Test of canEdit method, of class Item.
*/
@Test
public void testCanEditBooleanNoAuth() throws Exception
@@ -1650,6 +1672,79 @@ public class ItemTest extends AbstractDSpaceObjectTest
assertFalse("testCanEditBooleanNoAuth 0", itemService.canEdit(context, it));
}
/**
 * Test of canEdit method, of class Item: canEdit() returns false for a
 * workspace item's wrapped Item when WRITE authorization on the Item is
 * denied.
 */
@Test
public void testCanEditBooleanNoAuth2() throws Exception
{
// Create a fresh workspace item (auth system off so setup cannot fail)
context.turnOffAuthorisationSystem();
WorkspaceItem wi = workspaceItemService.create(context, collection, true);
context.restoreAuthSystemState();
// Test Inheritance of permissions
new NonStrictExpectations(authorizeService.getClass())
{{
// Disallow Item WRITE perms
authorizeService.authorizeAction((Context) any, (Item) any,
Constants.WRITE, anyBoolean); result = new AuthorizeException();
}};
// With Item WRITE denied, the workspace item must not be editable
assertFalse("testCanEditBooleanNoAuth2 0", itemService.canEdit(context, wi.getItem()));
}
/**
 * Test of isInProgressSubmission method, of class Item: an item that still
 * belongs to a workspace item is reported as an in-progress submission.
 * @throws AuthorizeException
 * @throws SQLException
 * @throws IOException
 *
 */
@Test
public void testIsInProgressSubmission() throws SQLException, AuthorizeException, IOException
{
    // Build a workspace item inside a fresh collection; setup runs with
    // the authorization system disabled.
    context.turnOffAuthorisationSystem();
    Collection owner = createCollection();
    WorkspaceItem workspaceItem = workspaceItemService.create(context, owner, true);
    context.restoreAuthSystemState();

    // The wrapped item has not been installed, so it is still in progress.
    assertTrue("testIsInProgressSubmission 0",
            itemService.isInProgressSubmission(context, workspaceItem.getItem()));
}
/**
 * Test of isInProgressSubmission method, of class Item: once a workspace
 * item has been installed into the repository, its item is no longer an
 * in-progress submission.
 * @throws AuthorizeException
 * @throws SQLException
 * @throws IOException
 *
 */
@Test
public void testIsInProgressSubmissionFalse() throws SQLException, AuthorizeException, IOException
{
    // Create a workspace item, then install (archive) it; setup runs with
    // the authorization system disabled.
    context.turnOffAuthorisationSystem();
    Collection owner = createCollection();
    WorkspaceItem workspaceItem = workspaceItemService.create(context, owner, true);
    Item archived = installItemService.installItem(context, workspaceItem);
    context.restoreAuthSystemState();

    // An installed item is not an in-progress submission.
    assertFalse("testIsInProgressSubmissionFalse 0",
            itemService.isInProgressSubmission(context, archived));
}
/**
 * Test of isInProgressSubmission method, of class Item: a collection's
 * template item is not considered an in-progress submission.
 * @throws AuthorizeException
 * @throws SQLException
 * @throws IOException
 *
 */
@Test
public void testIsInProgressSubmissionFalse2() throws SQLException, AuthorizeException, IOException
{
    // Give a fresh collection a template item; setup runs with the
    // authorization system disabled.
    context.turnOffAuthorisationSystem();
    Collection owner = createCollection();
    collectionService.createTemplateItem(context, owner);
    collectionService.update(context, owner);
    Item template = owner.getTemplateItem();
    context.restoreAuthSystemState();

    // Template items live outside any submission workflow.
    assertFalse("testIsInProgressSubmissionFalse2 0",
            itemService.isInProgressSubmission(context, template));
}
/**
* Test of getName method, of class Item.
*/

View File

@@ -295,11 +295,42 @@ public class WorkspaceItemTest extends AbstractUnitTest
* Test of update method, of class WorkspaceItem.
*/
@Test
public void testUpdate() throws Exception
public void testUpdateAuth() throws Exception
{
//TODO: how can we verify it works?
// no need to mockup the authorization as we are the same user that have
// created the wi
boolean pBefore = wi.isPublishedBefore();
wi.setPublishedBefore(!pBefore);
workspaceItemService.update(context, wi);
System.out.println("update");
context.commit();
// force to read the data from the database
context.clearCache();
// read all our test attributes objects from the fresh session
// to avoid duplicate object in session issue
wi = workspaceItemService.find(context, wi.getID());
collection = wi.getCollection();
owningCommunity = collection.getCommunities().get(0);
assertTrue("testUpdate", pBefore != wi.isPublishedBefore());
}
/**
 * Test of update method, of class WorkspaceItem with no WRITE auth:
 * update() must raise an AuthorizeException when the caller holds no
 * WRITE permission on the wrapped Item.
 */
@Test(expected=AuthorizeException.class)
public void testUpdateNoAuth() throws Exception
{
new NonStrictExpectations(authorizeService.getClass())
{{
// Remove Item WRITE perms
authorizeService.authorizeActionBoolean((Context) any, (Item) any,
Constants.WRITE); result = false;
authorizeService.authorizeAction((Context) any, (Item) any,
Constants.WRITE); result = new AuthorizeException();
}};
// Flip a field so update() has a real change to persist
boolean pBefore = wi.isPublishedBefore();
wi.setPublishedBefore(!pBefore);
// Expected to throw AuthorizeException (declared on the @Test annotation)
workspaceItemService.update(context, wi);
fail("Exception expected");
}
/**

View File

@@ -11,6 +11,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@@ -19,6 +20,10 @@ import mockit.NonStrictExpectations;
import org.apache.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.*;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.factory.ContentServiceFactory;
@@ -31,8 +36,10 @@ import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
@@ -61,11 +68,15 @@ public class ITDSpaceAIP extends AbstractUnitTest
protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService();
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
protected HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
protected PluginService pluginService = CoreServiceFactory.getInstance().getPluginService();
protected ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService();
protected ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService();
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
/** InfoMap multiple value separator (see saveObjectInfo() and assertObject* methods) **/
private static final String valueseparator = "::";
@@ -78,11 +89,15 @@ public class ITDSpaceAIP extends AbstractUnitTest
private static String submitterEmail = "aip-test@dspace.org";
private Context context;
/** Create a temporary folder which will be cleaned up automatically by JUnit.
NOTE: As a ClassRule, this temp folder is shared by ALL tests below.
Its AIP contents are initialized in init() below. **/
/** Create a global temporary upload folder which will be cleaned up automatically by JUnit.
NOTE: As a ClassRule, this temp folder is shared by ALL tests below. **/
@ClassRule
public static final TemporaryFolder testFolder = new TemporaryFolder();
public static final TemporaryFolder uploadTempFolder = new TemporaryFolder();
/** Create another temporary folder for AIPs. As a Rule, this one is *recreated* for each
test, in order to ensure each test is standalone with respect to AIPs. **/
@Rule
public final TemporaryFolder aipTempFolder = new TemporaryFolder();
/**
* This method will be run during class initialization. It will initialize
@@ -215,7 +230,7 @@ public class ITDSpaceAIP extends AbstractUnitTest
CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
Community topCommunity = (Community) handleService.resolveToObject(context, topCommunityHandle);
// Delete top level test community and test hierarchy under it
if(topCommunity!=null)
{
@@ -223,11 +238,21 @@ public class ITDSpaceAIP extends AbstractUnitTest
context.turnOffAuthorisationSystem();
communityService.delete(context, topCommunity);
context.restoreAuthSystemState();
context.complete();
context.commit();
}
// Delete the Eperson created to submit test items
EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
ePersonService.delete(context, ePersonService.findByEmail(context, submitterEmail));
EPerson eperson = ePersonService.findByEmail(context, submitterEmail);
if(eperson!=null)
{
log.info("tearDownClass() - DESTROY TEST EPERSON");
context.turnOffAuthorisationSystem();
ePersonService.delete(context, eperson);
context.restoreAuthSystemState();
context.commit();
}
if(context.isValid())
context.abort();
}
@@ -251,37 +276,13 @@ public class ITDSpaceAIP extends AbstractUnitTest
// JUnit TemporaryFolder. This ensures Crosswalk classes like RoleCrosswalk
// store their temp files in a place where JUnit can clean them up automatically.
new NonStrictExpectations(configService.getClass()) {{
configService.getProperty("upload.temp.dir"); result = testFolder.getRoot().getAbsolutePath();
configService.getProperty("upload.temp.dir"); result = uploadTempFolder.getRoot().getAbsolutePath();
}};
try
{
context = new Context();
context.setCurrentUser(EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, submitterEmail));
// Locate the top level community (from our test data)
Community topCommunity = (Community) handleService.resolveToObject(context, topCommunityHandle);
log.info("init() - CREATE TEST AIPS");
// NOTE: This will not overwrite the AIPs if they already exist.
// But, it does ensure they are created PRIOR to running any of the below tests.
// (So, essentially, this runs ONCE...after that, it'll be ignored since AIPs already exist)
// While ideally, you don't want to share data between tests, generating AIPs is VERY time-consuming.
createAIP(topCommunity, null, true, false);
}
catch(PackageException|CrosswalkException ex)
{
log.error("Packaging Error in init()", ex);
fail("Packaging Error in init(): " + ex.getMessage());
}
catch (AuthorizeException ex)
{
log.error("Authorization Error in init()", ex);
fail("Authorization Error in init(): " + ex.getMessage());
}
catch (IOException ex)
{
log.error("IO Error in init()", ex);
fail("IO Error in init(): " + ex.getMessage());
}
catch (SQLException ex)
{
@@ -322,9 +323,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
HashMap<String,String> infoMap = new HashMap<String,String>();
saveObjectInfo(topCommunity, infoMap);
// Ensure community & child AIPs are exported (but don't overwrite)
// Export community & child AIPs
log.info("testRestoreCommunityHierarchy() - CREATE AIPs");
File aipFile = createAIP(topCommunity, null, true, false);
File aipFile = createAIP(topCommunity, null, true);
// Delete everything from parent community on down
log.info("testRestoreCommunityHierarchy() - DELETE Community Hierarchy");
@@ -353,6 +354,80 @@ public class ITDSpaceAIP extends AbstractUnitTest
log.info("testRestoreCommunityHierarchy() - END");
}
/**
 * Test restoration from AIP of an access restricted Community: a Community
 * carrying a single custom READ policy is exported, deleted, and restored,
 * and the restored object must carry an equivalent policy (same name,
 * group and action).
 */
@Test
public void testRestoreRestrictedCommunity() throws Exception
{
new NonStrictExpectations(authorizeService.getClass())
{{
// Allow Full Admin permissions. Since we are working with an object
// hierarchy (Items/Bundles/Bitstreams) you need full admin rights
authorizeService.isAdmin((Context) any); result = true;
}};
log.info("testRestoreRestrictedCommunity() - BEGIN");
// Locate the top-level Community (as a parent)
Community parent = (Community) handleService.resolveToObject(context, topCommunityHandle);
// Create a brand new (empty) Community to test with
Community community = communityService.createSubcommunity(context, parent);
communityService.addMetadata(context, community, "dc", "title", null, null, "Restricted Community");
communityService.update(context, community);
String communityHandle = community.getHandle();
// Create a new Group to access restrict to
Group group = groupService.create(context);
groupService.setName(group, "Special Users");
groupService.update(context, group);
// Create a custom resource policy for this community
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Special Read Only");
policy.setGroup(group);
policy.setAction(Constants.READ);
policies.add(policy);
// Replace default community policies with this new one
authorizeService.removeAllPolicies(context, community);
authorizeService.addPolicies(context, policies, community);
// Export the community AIP
log.info("testRestoreRestrictedCommunity() - CREATE Community AIP");
File aipFile = createAIP(community, null, false);
// Now, delete that Community
log.info("testRestoreRestrictedCommunity() - DELETE Community");
communityService.removeSubcommunity(context, parent, community);
// Assert the deleted community no longer exists
DSpaceObject obj = handleService.resolveToObject(context, communityHandle);
assertThat("testRestoreRestrictedCommunity() Community " + communityHandle + " doesn't exist", obj, nullValue());
// Restore the Community from its AIP (non-recursive)
log.info("testRestoreRestrictedCommunity() - RESTORE Community");
restoreFromAIP(parent, aipFile, null, false);
// Assert the deleted Community is RESTORED
DSpaceObject objRestored = handleService.resolveToObject(context, communityHandle);
assertThat("testRestoreRestrictedCommunity() Community " + communityHandle + " exists", objRestored, notNullValue());
// Assert the number of restored policies is equal
List<ResourcePolicy> policiesRestored = authorizeService.getPolicies(context, objRestored);
assertEquals("testRestoreRestrictedCommunity() restored policy count equal", policies.size(), policiesRestored.size());
// Assert the restored policy has same name, group and permission settings
ResourcePolicy restoredPolicy = policiesRestored.get(0);
assertEquals("testRestoreRestrictedCommunity() restored policy group successfully", policy.getGroup().getName(), restoredPolicy.getGroup().getName());
assertEquals("testRestoreRestrictedCommunity() restored policy action successfully", policy.getAction(), restoredPolicy.getAction());
assertEquals("testRestoreRestrictedCommunity() restored policy name successfully", policy.getRpName(), restoredPolicy.getRpName());
log.info("testRestoreRestrictedCommunity() - END");
}
/**
* Test replacement from AIP of entire Community Hierarchy
*/
@@ -374,9 +449,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
// Get the count of collections under our Community or any Sub-Communities
int numberOfCollections = communityService.getAllCollections(context, topCommunity).size();
// Ensure community & child AIPs are exported (but don't overwrite)
// Export community & child AIPs
log.info("testReplaceCommunityHierarchy() - CREATE AIPs");
File aipFile = createAIP(topCommunity, null, true, false);
File aipFile = createAIP(topCommunity, null, true);
// Get some basic info about Collection to be deleted
// In this scenario, we'll delete the test "Grandchild Collection"
@@ -448,9 +523,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
// Get its current name / title
String oldName = topCommunity.getName();
// Ensure only community AIP is exported (but don't overwrite)
// Export only community AIP
log.info("testReplaceCommunityOnly() - CREATE Community AIP");
File aipFile = createAIP(topCommunity, null, false, false);
File aipFile = createAIP(topCommunity, null, false);
// Change the Community name
String newName = "This is NOT my Community name!";
@@ -492,9 +567,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
HashMap<String,String> infoMap = new HashMap<String,String>();
saveObjectInfo(testCollection, infoMap);
// Ensure collection & child AIPs are exported (but don't overwrite)
// Export collection & child AIPs
log.info("testRestoreCollectionHierarchy() - CREATE AIPs");
File aipFile = createAIP(testCollection, null, true, false);
File aipFile = createAIP(testCollection, null, true);
// Delete everything from collection on down
log.info("testRestoreCollectionHierarchy() - DELETE Collection Hierarchy");
@@ -513,6 +588,80 @@ public class ITDSpaceAIP extends AbstractUnitTest
log.info("testRestoreCollectionHierarchy() - END");
}
/**
* Test restoration from AIP of an access restricted Collection
*/
@Test
public void testRestoreRestrictedCollection() throws Exception
{
    new NonStrictExpectations(authorizeService.getClass())
    {{
        // Grant full admin rights for the duration of this test: restoring an
        // object hierarchy requires administrator-level authorization.
        authorizeService.isAdmin((Context) any); result = true;
    }};

    log.info("testRestoreRestrictedCollection() - BEGIN");

    // Resolve the top-level Community that will act as the parent.
    Community parentCommunity = (Community) handleService.resolveToObject(context, topCommunityHandle);

    // Build a fresh, empty Collection under that parent for this test.
    Collection restrictedCollection = collectionService.create(context, parentCommunity);
    collectionService.addMetadata(context, restrictedCollection, "dc", "title", null, null, "Restricted Collection");
    collectionService.update(context, restrictedCollection);
    String collectionHandle = restrictedCollection.getHandle();

    // Create the Group that the access restriction will reference.
    Group specialGroup = groupService.create(context);
    groupService.setName(specialGroup, "Special Users");
    groupService.update(context, specialGroup);

    // Build a single custom READ policy tied to that group.
    ResourcePolicy readPolicy = resourcePolicyService.create(context);
    readPolicy.setRpName("Special Read Only");
    readPolicy.setGroup(specialGroup);
    readPolicy.setAction(Constants.READ);
    List<ResourcePolicy> customPolicies = new ArrayList<>();
    customPolicies.add(readPolicy);

    // Swap out the Collection's default policies for the custom one.
    authorizeService.removeAllPolicies(context, restrictedCollection);
    authorizeService.addPolicies(context, customPolicies, restrictedCollection);

    // Export a (non-recursive) AIP for the restricted Collection.
    log.info("testRestoreRestrictedCollection() - CREATE Collection AIP");
    File aipFile = createAIP(restrictedCollection, null, false);

    // Delete the Collection and verify its handle no longer resolves.
    log.info("testRestoreRestrictedCollection() - DELETE Collection");
    communityService.removeCollection(context, parentCommunity, restrictedCollection);
    DSpaceObject deletedObject = handleService.resolveToObject(context, collectionHandle);
    assertThat("testRestoreRestrictedCollection() Collection " + collectionHandle + " doesn't exist", deletedObject, nullValue());

    // Restore the Collection from its AIP (non-recursive) and verify it is back.
    log.info("testRestoreRestrictedCollection() - RESTORE Collection");
    restoreFromAIP(parentCommunity, aipFile, null, false);
    DSpaceObject restored = handleService.resolveToObject(context, collectionHandle);
    assertThat("testRestoreRestrictedCollection() Collection " + collectionHandle + " exists", restored, notNullValue());

    // The restored Collection must carry the same number of policies...
    List<ResourcePolicy> restoredPolicies = authorizeService.getPolicies(context, restored);
    assertEquals("testRestoreRestrictedCollection() restored policy count equal", customPolicies.size(), restoredPolicies.size());

    // ...and the first policy must match the original's group, action and name.
    ResourcePolicy firstRestored = restoredPolicies.get(0);
    assertEquals("testRestoreRestrictedCollection() restored policy group successfully", readPolicy.getGroup().getName(), firstRestored.getGroup().getName());
    assertEquals("testRestoreRestrictedCollection() restored policy action successfully", readPolicy.getAction(), firstRestored.getAction());
    assertEquals("testRestoreRestrictedCollection() restored policy name successfully", readPolicy.getRpName(), firstRestored.getRpName());

    log.info("testRestoreRestrictedCollection() - END");
}
/**
* Test replacement from AIP of entire Collection (with Items)
*/
@@ -534,9 +683,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
// How many items are in this Collection?
int numberOfItems = itemService.countItems(context, testCollection);
// Ensure collection & child AIPs are exported (but don't overwrite)
// Export collection & child AIPs
log.info("testReplaceCollectionHierarchy() - CREATE AIPs");
File aipFile = createAIP(testCollection, null, true, false);
File aipFile = createAIP(testCollection, null, true);
// Get some basic info about Item to be deleted
// In this scenario, we'll delete the test "Grandchild Collection Item #1"
@@ -592,9 +741,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
// Get its current name / title
String oldName = testCollection.getName();
// Ensure only collection AIP is exported (but don't overwrite)
// Export only collection AIP
log.info("testReplaceCollectionOnly() - CREATE Collection AIP");
File aipFile = createAIP(testCollection, null, false, false);
File aipFile = createAIP(testCollection, null, false);
// Change the Collection name
String newName = "This is NOT my Collection name!";
@@ -651,13 +800,13 @@ public class ITDSpaceAIP extends AbstractUnitTest
if(bitstreamCount<=0)
fail("No test bitstream found for Item in testRestoreItem()!");
// Ensure item AIP is exported (but don't overwrite)
// Export item AIP
log.info("testRestoreItem() - CREATE Item AIP");
File aipFile = createAIP(testItem, null, false, false);
File aipFile = createAIP(testItem, null, false);
// Get parent, so we can restore under the same parent
Collection parent = (Collection) itemService.getParentObject(context, testItem);
// Now, delete that item
log.info("testRestoreItem() - DELETE Item");
collectionService.removeItem(context, parent, testItem);
@@ -683,6 +832,143 @@ public class ITDSpaceAIP extends AbstractUnitTest
log.info("testRestoreItem() - END");
}
/**
* Test restoration from AIP of an access restricted Item
*/
@Test
public void testRestoreRestrictedItem() throws Exception
{
    new NonStrictExpectations(authorizeService.getClass())
    {{
        // Allow Full Admin permissions. Since we are working with an object
        // hierarchy (Items/Bundles/Bitstreams) you need full admin rights
        authorizeService.isAdmin((Context) any); result = true;
    }};

    log.info("testRestoreRestrictedItem() - BEGIN");

    // Locate the test Collection (as a parent)
    Collection parent = (Collection) handleService.resolveToObject(context, testCollectionHandle);

    // Create a brand new Item to test with (since we will be changing policies)
    WorkspaceItem wsItem = workspaceItemService.create(context, parent, false);
    Item item = installItemService.installItem(context, wsItem);
    itemService.addMetadata(context, item, "dc", "title", null, null, "Test Restricted Item");

    // Create a test Bitstream in the ORIGINAL bundle
    File f = new File(testProps.get("test.bitstream").toString());
    Bitstream b = itemService.createSingleBitstream(context, new FileInputStream(f), item);
    b.setName(context, "Test Bitstream");
    bitstreamService.update(context, b);
    itemService.update(context, item);

    // Create a custom resource policy for this Item: a single READ policy
    // restricted to the Administrators group, replacing ALL default policies.
    List<ResourcePolicy> policies = new ArrayList<>();
    ResourcePolicy admin_policy = resourcePolicyService.create(context);
    admin_policy.setRpName("Admin Read-Only");
    Group adminGroup = groupService.findByName(context, Group.ADMIN);
    admin_policy.setGroup(adminGroup);
    admin_policy.setAction(Constants.READ);
    policies.add(admin_policy);
    itemService.replaceAllItemPolicies(context, item, policies);

    // Export item AIP (non-recursive) BEFORE deletion, so it can be restored
    log.info("testRestoreRestrictedItem() - CREATE Item AIP");
    File aipFile = createAIP(item, null, false);

    // Get item handle, so we can check that it is later restored properly
    String itemHandle = item.getHandle();

    // Now, delete that item
    log.info("testRestoreRestrictedItem() - DELETE Item");
    collectionService.removeItem(context, parent, item);

    // Assert the deleted item no longer exists (its handle no longer resolves)
    DSpaceObject obj = handleService.resolveToObject(context, itemHandle);
    assertThat("testRestoreRestrictedItem() item " + itemHandle + " doesn't exist", obj, nullValue());

    // Restore Item from AIP (non-recursive)
    log.info("testRestoreRestrictedItem() - RESTORE Item");
    restoreFromAIP(parent, aipFile, null, false);

    // Assert the deleted item is RESTORED
    DSpaceObject objRestored = handleService.resolveToObject(context, itemHandle);
    assertThat("testRestoreRestrictedItem() item " + itemHandle + " exists", objRestored, notNullValue());

    // Assert the number of restored policies is equal
    List<ResourcePolicy> policiesRestored = authorizeService.getPolicies(context, objRestored);
    assertEquals("testRestoreRestrictedItem() restored policy count equal", policies.size(), policiesRestored.size());

    // Assert the restored policy has same name, group and permission settings
    // (i.e. the access restriction survived the AIP export/restore round trip)
    ResourcePolicy restoredPolicy = policiesRestored.get(0);
    assertEquals("testRestoreRestrictedItem() restored policy group successfully", admin_policy.getGroup().getName(), restoredPolicy.getGroup().getName());
    assertEquals("testRestoreRestrictedItem() restored policy action successfully", admin_policy.getAction(), restoredPolicy.getAction());
    assertEquals("testRestoreRestrictedItem() restored policy name successfully", admin_policy.getRpName(), restoredPolicy.getRpName());

    log.info("testRestoreRestrictedItem() - END");
}
/**
* Test restoration from AIP of an Item that has no access policies associated with it.
*/
@Test
public void testRestoreItemNoPolicies() throws Exception
{
    new NonStrictExpectations(authorizeService.getClass())
    {{
        // Grant full admin rights for the duration of this test: restoring an
        // object hierarchy requires administrator-level authorization.
        authorizeService.isAdmin((Context) any); result = true;
    }};

    log.info("testRestoreItemNoPolicies() - BEGIN");

    // Resolve the test Collection that will own the new Item.
    Collection owningCollection = (Collection) handleService.resolveToObject(context, testCollectionHandle);

    // Install a brand-new Item (policies on shared fixtures must not be touched).
    WorkspaceItem workspaceItem = workspaceItemService.create(context, owningCollection, false);
    Item testItem = installItemService.installItem(context, workspaceItem);
    itemService.addMetadata(context, testItem, "dc", "title", null, null, "Test No Policies Item");

    // Attach a test Bitstream in the ORIGINAL bundle.
    File bitstreamFile = new File(testProps.get("test.bitstream").toString());
    Bitstream bitstream = itemService.createSingleBitstream(context, new FileInputStream(bitstreamFile), testItem);
    bitstream.setName(context, "Test Bitstream");
    bitstreamService.update(context, bitstream);
    itemService.update(context, testItem);

    // Strip every policy off the Item: the scenario under test is an Item
    // with NO access policies at all.
    authorizeService.removeAllPolicies(context, testItem);

    // Export a (non-recursive) AIP for the Item.
    log.info("testRestoreItemNoPolicies() - CREATE Item AIP");
    File aipFile = createAIP(testItem, null, false);

    // Remember the handle so the restore can be verified later.
    String itemHandle = testItem.getHandle();

    // Delete the Item and verify its handle no longer resolves.
    log.info("testRestoreItemNoPolicies() - DELETE Item");
    collectionService.removeItem(context, owningCollection, testItem);
    DSpaceObject deletedObject = handleService.resolveToObject(context, itemHandle);
    assertThat("testRestoreItemNoPolicies() item " + itemHandle + " doesn't exist", deletedObject, nullValue());

    // Restore the Item from its AIP (non-recursive) and verify it is back.
    log.info("testRestoreItemNoPolicies() - RESTORE Item");
    restoreFromAIP(owningCollection, aipFile, null, false);
    DSpaceObject restored = handleService.resolveToObject(context, itemHandle);
    assertThat("testRestoreItemNoPolicies() item " + itemHandle + " exists", restored, notNullValue());

    // The restored Item must also carry ZERO policies.
    List<ResourcePolicy> restoredPolicies = authorizeService.getPolicies(context, restored);
    assertEquals("testRestoreItemNoPolicies() restored policy count is zero", 0, restoredPolicies.size());

    log.info("testRestoreItemNoPolicies() - END");
}
/**
* Test replacement from AIP of an Item object
*/
@@ -704,9 +990,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
// Get its current name / title
String oldName = testItem.getName();
// Ensure item AIP is exported (but don't overwrite)
// Export item AIP
log.info("testReplaceItem() - CREATE Item AIP");
File aipFile = createAIP(testItem, null, false, false);
File aipFile = createAIP(testItem, null, false);
// Change the Item name
String newName = "This is NOT my Item name!";
@@ -750,9 +1036,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
List<Collection> mappedCollections = item.getCollections();
assertEquals("testRestoreMappedItem() item " + testMappedItemHandle + " is mapped to multiple collections", 2, mappedCollections.size());
// Ensure mapped item AIP is exported (but don't overwrite)
// Export mapped item AIP
log.info("testRestoreMappedItem() - CREATE Mapped Item AIP");
File aipFile = createAIP(item, null, false, false);
File aipFile = createAIP(item, null, false);
// Now, delete that item (must be removed from BOTH collections to delete it)
log.info("testRestoreMappedItem() - DELETE Item");
@@ -784,10 +1070,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
* @param dso DSpaceObject to create AIP(s) for
* @param pkParams any special PackageParameters to pass (if any)
* @param recursive whether to recursively create AIPs or just a single AIP
* @param overwrite whether to overwrite the local AIP file if it is found
* @return exported root AIP file
*/
private File createAIP(DSpaceObject dso, PackageParameters pkgParams, boolean recursive, boolean overwrite)
private File createAIP(DSpaceObject dso, PackageParameters pkgParams, boolean recursive)
throws PackageException, CrosswalkException, AuthorizeException, SQLException, IOException
{
// Get a reference to the configured "AIP" package disseminator
@@ -796,14 +1081,13 @@ public class ITDSpaceAIP extends AbstractUnitTest
if (dip == null)
{
fail("Could not find a disseminator for type 'AIP'");
return null;
}
// Export file (this is placed in JUnit's temporary folder, so that it can be cleaned up after tests complete)
File exportAIPFile = new File(testFolder.getRoot().getAbsolutePath() + File.separator + PackageUtils.getPackageName(dso, "zip"));
// To save time, we'll skip re-exporting AIPs, unless overwrite == true
if(!exportAIPFile.exists() || overwrite)
else
{
// Export file (this is placed in JUnit's temporary folder, so that it can be cleaned up after tests complete)
File exportAIPFile = new File(aipTempFolder.getRoot().getAbsolutePath() + File.separator + PackageUtils.getPackageName(dso, "zip"));
// If unspecified, set default PackageParameters
if (pkgParams==null)
pkgParams = new PackageParameters();
@@ -813,9 +1097,9 @@ public class ITDSpaceAIP extends AbstractUnitTest
dip.disseminateAll(context, dso, pkgParams, exportAIPFile);
else
dip.disseminate(context, dso, pkgParams, exportAIPFile);
return exportAIPFile;
}
return exportAIPFile;
}
/**
@@ -835,24 +1119,26 @@ public class ITDSpaceAIP extends AbstractUnitTest
{
fail("Could not find a ingestor for type 'AIP'");
}
if(!aipFile.exists())
{
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
}
// If unspecified, set default PackageParameters
if(pkgParams==null)
pkgParams = new PackageParameters();
// Ensure restore mode is enabled
pkgParams.setRestoreModeEnabled(true);
// Actually ingest the object(s) from AIPs
if(recursive)
sip.ingestAll(context, parent, aipFile, pkgParams, null);
else
sip.ingest(context, parent, aipFile, pkgParams, null);
{
if(!aipFile.exists())
{
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
}
// If unspecified, set default PackageParameters
if(pkgParams==null)
pkgParams = new PackageParameters();
// Ensure restore mode is enabled
pkgParams.setRestoreModeEnabled(true);
// Actually ingest the object(s) from AIPs
if(recursive)
sip.ingestAll(context, parent, aipFile, pkgParams, null);
else
sip.ingest(context, parent, aipFile, pkgParams, null);
}
}
/**
@@ -872,24 +1158,26 @@ public class ITDSpaceAIP extends AbstractUnitTest
{
fail("Could not find a ingestor for type 'AIP'");
}
if(!aipFile.exists())
{
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
}
// If unspecified, set default PackageParameters
if (pkgParams==null)
pkgParams = new PackageParameters();
// Ensure restore mode is enabled
pkgParams.setRestoreModeEnabled(true);
// Actually replace the object(s) from AIPs
if(recursive)
sip.replaceAll(context, dso, aipFile, pkgParams);
else
sip.replace(context, dso, aipFile, pkgParams);
{
if(!aipFile.exists())
{
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
}
// If unspecified, set default PackageParameters
if (pkgParams==null)
pkgParams = new PackageParameters();
// Ensure restore mode is enabled
pkgParams.setRestoreModeEnabled(true);
// Actually replace the object(s) from AIPs
if(recursive)
sip.replaceAll(context, dso, aipFile, pkgParams);
else
sip.replace(context, dso, aipFile, pkgParams);
}
}
/**

View File

@@ -31,6 +31,7 @@ import static org.junit.Assert.*;
@RunWith(Parameterized.class)
public class MultiFormatDateParserTest
{
private static Locale vmLocale;
private final String testMessage;
private final String toParseDate;
private final String expectedFormat;
@@ -82,6 +83,10 @@ public class MultiFormatDateParserTest
@BeforeClass
public static void setUpClass()
{
// store default locale of the environment
vmLocale = Locale.getDefault();
// set default locale to English just for the test of this class
Locale.setDefault(Locale.ENGLISH);
Map<String, String> formats = new HashMap<>(32);
formats.put("\\d{8}" ,"yyyyMMdd");
formats.put("\\d{1,2}-\\d{1,2}-\\d{4}", "dd-MM-yyyy");
@@ -119,6 +124,8 @@ public class MultiFormatDateParserTest
@AfterClass
public static void tearDownClass()
{
// restore locale
Locale.setDefault(vmLocale);
}
@Before

View File

@@ -219,7 +219,7 @@
<dependency>
<groupId>com.yahoo.platform.yui</groupId>
<artifactId>yuicompressor</artifactId>
<version>2.4.8</version>
<version>2.3.6</version>
<exclusions>
<exclusion>
<groupId>rhino</groupId>

View File

@@ -9,7 +9,6 @@ package org.dspace.app.xmlui.aspect.administrative;
import org.apache.cocoon.environment.Request;
import org.apache.cocoon.servlet.multipart.Part;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.authorize.AuthorizeException;
@@ -43,6 +42,7 @@ import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.WorkflowUtils;
import org.dspace.xmlworkflow.service.XmlWorkflowService;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.xml.sax.SAXException;
@@ -59,6 +59,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.UUID;
/**
* Utility methods to process actions on Communities and Collections.
*
@@ -540,7 +541,7 @@ public class FlowContainerUtils
collectionService.removeSubmitters(context, collection);
}
else{
if(StringUtils.equals(DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("workflow.framework"), "xmlworkflow"))
if(WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService)
{
WorkflowUtils.deleteRoleGroup(context, collection, roleName);
}else{

View File

@@ -25,10 +25,11 @@ import org.dspace.core.LogManager;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.dspace.xmlworkflow.Role;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.WorkflowUtils;
import org.dspace.xmlworkflow.service.XmlWorkflowService;
import java.io.IOException;
import java.sql.SQLException;
@@ -247,7 +248,7 @@ public class AssignCollectionRoles extends AbstractDSpaceTransformer
tableRow.addCell(1,2).addHighlight("fade offset").addContent(T_help_default_read);
if(StringUtils.equals(DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("workflow.framework"), "xmlworkflow")) {
if(WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService) {
try{
HashMap<String, Role> roles = WorkflowUtils.getAllExternalRoles(thisCollection);
addXMLWorkflowRoles(thisCollection, baseURL, roles, rolesTable);

View File

@@ -29,7 +29,10 @@ importClass(Packages.org.dspace.eperson.EPerson);
importClass(Packages.org.dspace.eperson.Group);
importClass(Packages.org.dspace.app.util.Util);
importClass(Packages.org.dspace.workflow.factory.WorkflowServiceFactory);
importClass(Packages.org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory);
importClass(Packages.org.dspace.xmlworkflow.service.XmlWorkflowService);
importClass(Packages.java.util.Set);
importClass(Packages.org.dspace.app.xmlui.utils.FlowscriptUtils);
@@ -45,7 +48,6 @@ importClass(Packages.org.dspace.app.xmlui.aspect.administrative.FlowCurationUtil
importClass(Packages.org.dspace.app.xmlui.aspect.administrative.FlowMetadataImportUtils);
importClass(Packages.org.dspace.app.xmlui.aspect.administrative.FlowBatchImportUtils);
importClass(Packages.java.lang.System);
importClass(Packages.org.dspace.core.ConfigurationManager);
/**
* Simple access method to access the current cocoon object model.
@@ -2698,7 +2700,7 @@ function doAssignCollectionRoles(collectionID)
{
result = doDeleteCollectionRole(collectionID, "DEFAULT_READ");
}else{
if(StringUtils.equals(ConfigurationManager.getProperty("workflow.framework"), "xmlworkflow")){
if(WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService){
if(workflow == null){
var collection = getCollectionService().find(getDSContext(),collectionID);
workflow = getXmlWorkflowFactory().getWorkflow(collection);

View File

@@ -474,6 +474,7 @@ crosswalk.dissemination.DataCite.preferList = false
crosswalk.dissemination.DataCite.publisher = My University
#crosswalk.dissemination.DataCite.dataManager = # defaults to publisher
#crosswalk.dissemination.DataCite.hostingInstitution = # defaults to publisher
crosswalk.dissemination.DataCite.namespace = http://datacite.org/schema/kernel-3
# Crosswalk Plugin Configuration:
# The purpose of Crosswalks is to translate an external metadata format to/from

View File

@@ -64,17 +64,19 @@
<mapping class="org.dspace.versioning.Version"/>
<mapping class="org.dspace.versioning.VersionHistory"/>
<mapping class="org.dspace.app.requestitem.RequestItem"/>
<!--Basic workflow services, comment or remove when switching to the configurable workflow -->
<mapping class="org.dspace.workflowbasic.BasicWorkflowItem"/>
<mapping class="org.dspace.workflowbasic.TaskListItem"/>
<mapping class="org.dspace.app.requestitem.RequestItem"/>
<!--<mapping class="org.dspace.xmlworkflow.storedcomponents.ClaimedTask"/>-->
<!--<mapping class="org.dspace.xmlworkflow.storedcomponents.CollectionRole"/>-->
<!--<mapping class="org.dspace.xmlworkflow.storedcomponents.InProgressUser"/>-->
<!--<mapping class="org.dspace.xmlworkflow.storedcomponents.PoolTask"/>-->
<!--<mapping class="org.dspace.xmlworkflow.storedcomponents.WorkflowItemRole"/>-->
<!--<mapping class="org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem"/>-->
<!--Configurable workflow services, uncomment the xml workflow classes below to enable the configurable workflow-->
<!--<mapping class="org.dspace.xmlworkflow.storedcomponents.ClaimedTask"/>
<mapping class="org.dspace.xmlworkflow.storedcomponents.CollectionRole"/>
<mapping class="org.dspace.xmlworkflow.storedcomponents.InProgressUser"/>
<mapping class="org.dspace.xmlworkflow.storedcomponents.PoolTask"/>
<mapping class="org.dspace.xmlworkflow.storedcomponents.WorkflowItemRole"/>
<mapping class="org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem"/>-->
</session-factory>
</hibernate-configuration>

View File

@@ -4,12 +4,12 @@
# Configuration properties used solely by the Configurable #
# Reviewer Workflow (XMLUI only) #
#---------------------------------------------------------------#
#Selection of workflow framework that will be used in DSpace
# Possible values:
# originalworkflow = Traditional DSpace Workflow
# xmlworkflow = New (as of 1.8.0) Configurable Reviewer Workflow
workflow.framework=originalworkflow
#workflow.framework=xmlworkflow
#
# Workflow framework used by DSpace is now determined by the configured
# WorkflowService implementation in [dspace.dir]/config/spring/api/core-services.xml
# One of two WorkflowServices should be enabled in that file:
# org.dspace.workflowbasic.BasicWorkflowServiceImpl = Traditional DSpace Workflow
# org.dspace.xmlworkflow.XmlWorkflowServiceImpl = Configurable (XML) Workflow
#Allow the reviewers to add/edit/remove files from the submission
#When changing this property you might want to alert submitters in the license that reviewers can alter their files

View File

@@ -103,19 +103,15 @@
<bean class="org.dspace.workflowbasic.BasicWorkflowItemServiceImpl"/>
<bean class="org.dspace.workflowbasic.BasicWorkflowServiceImpl"/>
<!--<bean class="org.dspace.xmlworkflow.storedcomponents.ClaimedTaskServiceImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.storedcomponents.CollectionRoleServiceImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.storedcomponents.InProgressUserServiceImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.storedcomponents.PoolTaskServiceImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.storedcomponents.WorkflowItemRoleServiceImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItemServiceImpl"/>-->
<!--Configurable workflow services, uncomment the xml workflow beans below to enable the configurable workflow-->
<!--<bean class="org.dspace.xmlworkflow.XmlWorkflowServiceImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.WorkflowRequirementsServiceImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.XmlWorkflowFactoryImpl"/>-->
<!--<bean class="org.dspace.xmlworkflow.storedcomponents.ClaimedTaskServiceImpl"/>
<bean class="org.dspace.xmlworkflow.storedcomponents.CollectionRoleServiceImpl"/>
<bean class="org.dspace.xmlworkflow.storedcomponents.InProgressUserServiceImpl"/>
<bean class="org.dspace.xmlworkflow.storedcomponents.PoolTaskServiceImpl"/>
<bean class="org.dspace.xmlworkflow.storedcomponents.WorkflowItemRoleServiceImpl"/>
<bean class="org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItemServiceImpl"/>
<bean class="org.dspace.xmlworkflow.XmlWorkflowServiceImpl"/>
<bean class="org.dspace.xmlworkflow.WorkflowRequirementsServiceImpl"/>
<bean class="org.dspace.xmlworkflow.XmlWorkflowFactoryImpl"/>-->
</beans>

View File

@@ -61,7 +61,9 @@
@updateseq.sql metadataschemaregistry_seq metadataschemaregistry metadata_schema_id ""
@updateseq.sql harvested_collection_seq harvested_collection id ""
@updateseq.sql harvested_item_seq harvested_item id ""
@updateseq.sql webapp_seq webapp id ""
@updateseq.sql webapp_seq webapp webapp_id ""
@updateseq.sql requestitem_seq requestitem requestitem_id ""
@updateseq.sql handle_id_seq handle handle_id ""
-- Handle Sequence is a special case. Since Handles minted by DSpace use the 'handle_seq',
-- we need to ensure the next assigned handle will *always* be unique. So, 'handle_seq'

View File

@@ -60,6 +60,7 @@ SELECT setval('harvested_collection_seq', max(id)) FROM harvested_collection;
SELECT setval('harvested_item_seq', max(id)) FROM harvested_item;
SELECT setval('webapp_seq', max(webapp_id)) FROM webapp;
SELECT setval('requestitem_seq', max(requestitem_id)) FROM requestitem;
SELECT setval('handle_id_seq', max(handle_id)) FROM handle;
-- Handle Sequence is a special case. Since Handles minted by DSpace use the 'handle_seq',
-- we need to ensure the next assigned handle will *always* be unique. So, 'handle_seq'