Merge branch 'DS-3292' into DS-1814

Luigi Andrea Pascarelli
2016-08-20 20:29:49 +02:00
168 changed files with 2466 additions and 1115 deletions

View File

@@ -635,7 +635,7 @@
<dependency> <dependency>
<groupId>org.flywaydb</groupId> <groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId> <artifactId>flyway-core</artifactId>
<version>3.2.1</version> <version>4.0.3</version>
</dependency> </dependency>
<!-- Google Analytics --> <!-- Google Analytics -->
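The only change in this hunk is the Flyway version bump from 3.2.1 to 4.0.3. As a rough orientation sketch of the 4.x Java API (DSpace itself drives migrations through DatabaseUtils.updateDatabase() rather than calling Flyway directly; dataSource here is an assumed, pre-configured javax.sql.DataSource):

// Minimal Flyway 4.x sketch, not DSpace's actual entry point
Flyway flyway = new Flyway();
flyway.setDataSource(dataSource);   // assumption: dataSource obtained elsewhere
flyway.migrate();                   // applies any pending migrations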

View File

@@ -631,7 +631,7 @@ public class DSpaceCSV implements Serializable
int c = 1; int c = 1;
while (i.hasNext()) while (i.hasNext())
{ {
csvLines[c++] = i.next().toCSV(headingsCopy, fieldSeparator); csvLines[c++] = i.next().toCSV(headingsCopy, fieldSeparator, valueSeparator);
} }
return csvLines; return csvLines;

View File

@@ -150,16 +150,17 @@ public class DSpaceCSVLine implements Serializable
* Write this line out as a CSV formatted string, in the order given by the headings provided * Write this line out as a CSV formatted string, in the order given by the headings provided
* *
* @param headings The headings which define the order the elements must be presented in * @param headings The headings which define the order the elements must be presented in
* @param fieldSeparator field separator * @param fieldSeparator separator between metadata fields
* @param valueSeparator separator between metadata values (within a field)
* @return The CSV formatted String * @return The CSV formatted String
*/ */
protected String toCSV(List<String> headings, String fieldSeparator) protected String toCSV(List<String> headings, String fieldSeparator, String valueSeparator)
{ {
StringBuilder bits = new StringBuilder(); StringBuilder bits = new StringBuilder();
// Add the id // Add the id
bits.append("\"").append(id).append("\"").append(fieldSeparator); bits.append("\"").append(id).append("\"").append(fieldSeparator);
bits.append(valueToCSV(items.get("collection"), fieldSeparator)); bits.append(valueToCSV(items.get("collection"),valueSeparator));
// Add the rest of the elements // Add the rest of the elements
for (String heading : headings) for (String heading : headings)
@@ -168,7 +169,7 @@ public class DSpaceCSVLine implements Serializable
List<String> values = items.get(heading); List<String> values = items.get(heading);
if (values != null && !"collection".equals(heading)) if (values != null && !"collection".equals(heading))
{ {
bits.append(valueToCSV(values, fieldSeparator)); bits.append(valueToCSV(values, valueSeparator));
} }
} }
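With this change DSpaceCSVLine.toCSV() receives the value separator as an explicit argument instead of reusing the field separator for both purposes. A hedged sketch of the call as it is now made from DSpaceCSV (the separator strings are illustrative, not necessarily the configured defaults):

String fieldSeparator = ",";     // separates columns
String valueSeparator = "||";    // separates repeated values within one column
String csvLine = line.toCSV(headingsCopy, fieldSeparator, valueSeparator);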

View File

@@ -203,14 +203,11 @@ public class MetadataImport
} }
// Remove the item // Remove the item
List<Collection> owners = item.getCollections();
for (Collection owner : owners) if (change) {
{ itemService.delete(c, item);
if (change) }
{
collectionService.removeItem(c, owner, item);
}
}
whatHasChanged.setDeleted(); whatHasChanged.setDeleted();
} }
else if ("withdraw".equals(action)) else if ("withdraw".equals(action))

View File

@@ -1721,7 +1721,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
/** /**
* Generate a random filename based on current time * Generate a random filename based on current time
* @param hidden: add . as a prefix to make the file hidden * @param hidden true to add "." as a prefix, making the file hidden
* @return the filename * @return the filename
*/ */
protected String generateRandomFilename(boolean hidden) protected String generateRandomFilename(boolean hidden)

View File

@@ -33,7 +33,7 @@ class, defined in this package, to select an event processing implementation.
</p> </p>
<p> <p>
Three "stock" implementations are provided. Several "stock" implementations are provided.
</p> </p>
<dl> <dl>
<dt>{@link org.dspace.usage.PassiveUsageEventListener PassiveUsageEventListener}</dt> <dt>{@link org.dspace.usage.PassiveUsageEventListener PassiveUsageEventListener}</dt>
@@ -42,9 +42,12 @@ Three "stock" implementations are provided.
if no plugin is configured.</dd> if no plugin is configured.</dd>
<dt>{@link org.dspace.usage.TabFileUsageEventListener TabFileUsageEventListener}</dt> <dt>{@link org.dspace.usage.TabFileUsageEventListener TabFileUsageEventListener}</dt>
<dd>writes event records to a file in Tab Separated Values format.</dd> <dd>writes event records to a file in Tab Separated Values format.</dd>
<dt>{@link org.dspace.app.statistics.UsageEventXMLLogger UsageEventXMLLogger}</dt> <dt>{@link org.dspace.usage.LoggerUsageEventListener LoggerUsageEventListener}</dt>
<dd>writes event records to a file in an XML format. Suitable mainly for <dd>writes event records to the Java logger.</dd>
testing.</dd> <dt>{@link org.dspace.statistics.SolrLoggerUsageEventListener SolrLoggerUsageEventListener}</dt>
<dd>writes event records to Solr.</dd>
<dt>{@link org.dspace.google.GoogleRecorderEventListener GoogleRecorderEventListener}</dt>
<dd>writes event records to Google Analytics.</dd>
</dl> </dl>
</body> </body>
</html> </html>

View File

@@ -12,7 +12,10 @@ import java.util.*;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
/** /**
* Utility class for lists of collections. * Utility class for lists of collections.
@@ -20,6 +23,9 @@ import org.dspace.core.ConfigurationManager;
public class CollectionDropDown { public class CollectionDropDown {
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
/** /**
* Get full path starting from a top-level community via subcommunities down to a collection. * Get full path starting from a top-level community via subcommunities down to a collection.
* The full path will not be truncated. * The full path will not be truncated.
@@ -29,9 +35,9 @@ public class CollectionDropDown {
* @return Full path to the collection * @return Full path to the collection
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public static String collectionPath(Collection col) throws SQLException public static String collectionPath(Context context, Collection col) throws SQLException
{ {
return CollectionDropDown.collectionPath(col, 0); return CollectionDropDown.collectionPath(context, col, 0);
} }
/** /**
@@ -45,7 +51,7 @@ public class CollectionDropDown {
* @return Full path to the collection (truncated) * @return Full path to the collection (truncated)
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public static String collectionPath(Collection col, int maxchars) throws SQLException public static String collectionPath(Context context, Collection col, int maxchars) throws SQLException
{ {
String separator = ConfigurationManager.getProperty("subcommunity.separator"); String separator = ConfigurationManager.getProperty("subcommunity.separator");
if (separator == null) if (separator == null)
@@ -55,7 +61,7 @@ public class CollectionDropDown {
List<Community> getCom = null; List<Community> getCom = null;
StringBuffer name = new StringBuffer(""); StringBuffer name = new StringBuffer("");
getCom = col.getCommunities(); // all communities containing given collection getCom = communityService.getAllParents(context, col); // all communities containing given collection
for (Community com : getCom) for (Community com : getCom)
{ {
name.insert(0, com.getName() + separator); name.insert(0, com.getName() + separator);
@@ -83,13 +89,13 @@ public class CollectionDropDown {
* @return A sorted array of collection path entries (essentially collection/path pairs). * @return A sorted array of collection path entries (essentially collection/path pairs).
* @throws SQLException In case there are problems annotating a collection with its path. * @throws SQLException In case there are problems annotating a collection with its path.
*/ */
public static CollectionPathEntry[] annotateWithPaths(List<Collection> collections) throws SQLException public static CollectionPathEntry[] annotateWithPaths(Context context, List<Collection> collections) throws SQLException
{ {
CollectionPathEntry[] result = new CollectionPathEntry[collections.size()]; CollectionPathEntry[] result = new CollectionPathEntry[collections.size()];
for (int i = 0; i < collections.size(); i++) for (int i = 0; i < collections.size(); i++)
{ {
Collection collection = collections.get(i); Collection collection = collections.get(i);
CollectionPathEntry entry = new CollectionPathEntry(collection, collectionPath(collection)); CollectionPathEntry entry = new CollectionPathEntry(collection, collectionPath(context, collection));
result[i] = entry; result[i] = entry;
} }
Arrays.sort(result); Arrays.sort(result);
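Because parent communities are now resolved through CommunityService.getAllParents(), the CollectionDropDown helpers need a Context. A hedged calling sketch of the new signatures (context and the collection list are assumed to come from the caller):

// Build a display path for one collection
String path = CollectionDropDown.collectionPath(context, collection);
// Annotate and sort a whole list of collections
CollectionDropDown.CollectionPathEntry[] entries =
        CollectionDropDown.annotateWithPaths(context, collections);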

View File

@@ -229,7 +229,7 @@ public class GoogleMetadata
* first-encountered instance of the field for this Item. * first-encountered instance of the field for this Item.
* *
* @param fieldName * @param fieldName
* @return * @return true if the field was added successfully
*/ */
protected boolean addSingleField(String fieldName) protected boolean addSingleField(String fieldName)
{ {
@@ -445,7 +445,7 @@ public class GoogleMetadata
* configuration. * configuration.
* *
* @param configFilter * @param configFilter
* @return * @return array of parsed options or null
*/ */
protected ArrayList<ArrayList<String>> parseOptions(String configFilter) protected ArrayList<ArrayList<String>> parseOptions(String configFilter)
{ {
@@ -1046,7 +1046,7 @@ public class GoogleMetadata
* </ul> * </ul>
* Additionally, this bitstream must be publicly viewable. * Additionally, this bitstream must be publicly viewable.
* @param item * @param item
* @return * @return a linkable bitstream or null if none found
* @throws SQLException if database error * @throws SQLException if database error
*/ */
protected Bitstream findLinkableFulltext(Item item) throws SQLException { protected Bitstream findLinkableFulltext(Item item) throws SQLException {
@@ -1201,7 +1201,7 @@ public class GoogleMetadata
* metadata practice. * metadata practice.
* *
* @param dConfig * @param dConfig
* @return * @return true if the item matches the configuration
*/ */
protected boolean identifyItemType(String dConfig) protected boolean identifyItemType(String dConfig)
{ {

View File

@@ -481,9 +481,6 @@ public class LDAPAuthentication
env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "simple"); env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "simple");
env.put(javax.naming.Context.SECURITY_PRINCIPAL, adminUser); env.put(javax.naming.Context.SECURITY_PRINCIPAL, adminUser);
env.put(javax.naming.Context.SECURITY_CREDENTIALS, adminPassword); env.put(javax.naming.Context.SECURITY_CREDENTIALS, adminPassword);
// Create initial context
ctx = new InitialLdapContext(env, null);
} }
} }
else else
@@ -491,6 +488,11 @@ public class LDAPAuthentication
// Use anonymous authentication // Use anonymous authentication
env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "none"); env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "none");
} }
if (ctx == null) {
// Create initial context
ctx = new InitialLdapContext(env, null);
}
Attributes matchAttrs = new BasicAttributes(true); Attributes matchAttrs = new BasicAttributes(true);
matchAttrs.put(new BasicAttribute(ldap_id_field, netid)); matchAttrs.put(new BasicAttribute(ldap_id_field, netid));
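Creating the InitialLdapContext is moved out of the admin-credentials branch: the environment is fully populated first (simple bind with admin credentials, or anonymous), and a single context is created afterwards if none exists yet. A condensed sketch of the resulting flow (the useAdminCredentials flag is illustrative shorthand for the surrounding conditions):

Hashtable<String, String> env = new Hashtable<>();
if (useAdminCredentials) {   // hypothetical flag, stands in for the checks above
    env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "simple");
    env.put(javax.naming.Context.SECURITY_PRINCIPAL, adminUser);
    env.put(javax.naming.Context.SECURITY_CREDENTIALS, adminPassword);
} else {
    env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "none");
}
if (ctx == null) {
    ctx = new InitialLdapContext(env, null);   // one creation point for both cases
}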

View File

@@ -14,12 +14,12 @@ import org.apache.solr.common.SolrInputDocument;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataValueService; import org.dspace.content.service.MetadataValueService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat; import org.joda.time.format.ISODateTimeFormat;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.*; import java.util.*;
@@ -64,12 +64,6 @@ public class AuthorityValue {
*/ */
private Date lastModified; private Date lastModified;
@Autowired(required = true)
protected AuthorityTypes authorityTypes;
@Autowired(required = true)
protected MetadataValueService metadataValueService;
public AuthorityValue() { public AuthorityValue() {
} }
@@ -187,7 +181,7 @@ public class AuthorityValue {
public void updateItem(Context context, Item currentItem, MetadataValue value) throws SQLException, AuthorizeException { public void updateItem(Context context, Item currentItem, MetadataValue value) throws SQLException, AuthorizeException {
value.setValue(getValue()); value.setValue(getValue());
value.setAuthority(getId()); value.setAuthority(getId());
metadataValueService.update(context, value, true); ContentServiceFactory.getInstance().getMetadataValueService().update(context, value, true);
} }
/** /**
@@ -248,9 +242,9 @@ public class AuthorityValue {
} }
/** /**
* Provides a string that will be allow a this AuthorityType to be recognized and provides information to create a new instance to be created using public AuthorityValue newInstance(String info). * Provides a string that will allow this AuthorityType to be recognized and provides information to create a new instance to be created using public AuthorityValue newInstance(String info).
* See the implementation of com.atmire.org.dspace.authority.AuthorityValueGenerator#generateRaw(java.lang.String, java.lang.String) for more precisions. * See the implementation of {@link com.atmire.org.dspace.authority.AuthorityValueGenerator#generateRaw(java.lang.String, java.lang.String) AuthorityValueGenerator#generateRaw(java.lang.String, java.lang.String)} for more details.
* @return * @return see {@link org.dspace.authority.service.AuthorityValueService#GENERATE AuthorityValueService.GENERATE}
*/ */
public String generateString() { public String generateString() {
return AuthorityValueServiceImpl.GENERATE; return AuthorityValueServiceImpl.GENERATE;
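AuthorityValue objects are instantiated directly rather than as Spring beans, so the removed @Autowired fields were never actually injected; updateItem() now obtains the MetadataValueService through the static factory instead. Sketch of the lookup pattern used here:

MetadataValueService metadataValueService =
        ContentServiceFactory.getInstance().getMetadataValueService();
metadataValueService.update(context, value, true);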

View File

@@ -42,9 +42,15 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
@JoinColumn(name = "dspace_object") @JoinColumn(name = "dspace_object")
private DSpaceObject dSpaceObject; private DSpaceObject dSpaceObject;
/*
* {@see org.dspace.core.Constants#Constants Constants}
*/
@Column(name = "resource_type_id") @Column(name = "resource_type_id")
private int resourceTypeId; private int resourceTypeId;
/*
* {@see org.dspace.core.Constants#Constants Constants}
*/
@Column(name="action_id") @Column(name="action_id")
private int actionId; private int actionId;
@@ -293,4 +299,4 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
public void setRpDescription(String description){ public void setRpDescription(String description){
this.rpdescription = description; this.rpdescription = description;
} }
} }

View File

@@ -87,7 +87,7 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
List<ResourcePolicy> results; List<ResourcePolicy> results;
if (notPolicyID != -1) if (notPolicyID != -1)
{ {
criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", action)))); criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", notPolicyID))));
} }
return list(criteria); return list(criteria);
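The old criterion excluded policies whose id happened to equal the action constant; the fix compares against notPolicyID, the policy that should be skipped. The same restriction written on its own (the redundant Restrictions.and() wrapper is dropped here for clarity):

if (notPolicyID != -1) {
    // exclude the policy we are checking against, not the action id
    criteria.add(Restrictions.not(Restrictions.eq("id", notPolicyID)));
}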

View File

@@ -107,7 +107,7 @@ public interface AuthorizeService {
* @param o DSpaceObject * @param o DSpaceObject
* @param a action being attempted, from * @param a action being attempted, from
* <code>org.dspace.core.Constants</code> * <code>org.dspace.core.Constants</code>
* @return <code>true</code> if the current user in the context is * @return {@code true} if the current user in the context is
* authorized to perform the given action on the given object * authorized to perform the given action on the given object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@@ -124,7 +124,7 @@ public interface AuthorizeService {
* @param useInheritance * @param useInheritance
* flag to say if ADMIN action on the current object or parent * flag to say if ADMIN action on the current object or parent
* object can be used * object can be used
* @return <code>true</code> if the current user in the context is * @return {@code true} if the current user in the context is
* authorized to perform the given action on the given object * authorized to perform the given action on the given object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@@ -142,7 +142,7 @@ public interface AuthorizeService {
* @param useInheritance * @param useInheritance
* flag to say if ADMIN action on the current object or parent * flag to say if ADMIN action on the current object or parent
* object can be used * object can be used
* @return <code>true</code> if the requested user is * @return {@code true} if the requested user is
* authorized to perform the given action on the given object * authorized to perform the given action on the given object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@@ -154,14 +154,14 @@ public interface AuthorizeService {
/** /**
* Check to see if the current user is an Administrator of a given object * Check to see if the current user is an Administrator of a given object
* within DSpace. Always return <code>true</code> if the user is a System * within DSpace. Always return {@code true} if the user is a System
* Admin * Admin
* *
* @param c current context * @param c current context
* @param o current DSpace Object, if <code>null</code> the call will be * @param o current DSpace Object, if <code>null</code> the call will be
* equivalent to a call to the <code>isAdmin(Context c)</code> * equivalent to a call to the <code>isAdmin(Context c)</code>
* method * method
* @return <code>true</code> if user has administrative privileges on the * @return {@code true} if user has administrative privileges on the
* given DSpace object * given DSpace object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@@ -170,11 +170,11 @@ public interface AuthorizeService {
/** /**
* Check to see if the current user is a System Admin. Always return * Check to see if the current user is a System Admin. Always return
* <code>true</code> if c.ignoreAuthorization is set. Anonymous users * {@code true} if c.ignoreAuthorization is set. Anonymous users
* can't be Admins (EPerson set to NULL) * can't be Admins (EPerson set to NULL)
* *
* @param c current context * @param c current context
* @return <code>true</code> if user is an admin or ignore authorization * @return {@code true} if user is an admin or ignore authorization
* flag set * flag set
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@@ -240,7 +240,7 @@ public interface AuthorizeService {
* *
* @param c current context * @param c current context
* @param o object to retrieve policies for * @param o object to retrieve policies for
* @return List of <code>ResourcePolicy</code> objects * @return List of {@code ResourcePolicy} objects
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public List<ResourcePolicy> getPolicies(Context c, DSpaceObject o) throws SQLException; public List<ResourcePolicy> getPolicies(Context c, DSpaceObject o) throws SQLException;
@@ -251,7 +251,7 @@ public interface AuthorizeService {
* @param c current context * @param c current context
* @param o object to retrieve policies for * @param o object to retrieve policies for
* @param type type * @param type type
* @return List of <code>ResourcePolicy</code> objects * @return List of {@code ResourcePolicy} objects
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public List<ResourcePolicy> findPoliciesByDSOAndType(Context c, DSpaceObject o, String type) throws SQLException; public List<ResourcePolicy> findPoliciesByDSOAndType(Context c, DSpaceObject o, String type) throws SQLException;
@@ -261,8 +261,8 @@ public interface AuthorizeService {
* *
* @param c current context * @param c current context
* @param g group to retrieve policies for * @param g group to retrieve policies for
* @return List of <code>ResourcePolicy</code> objects * @return List of {@code ResourcePolicy} objects
* @@throws SQLException if database error * @throws SQLException if database error
*/ */
public List<ResourcePolicy> getPoliciesForGroup(Context c, Group g) throws SQLException; public List<ResourcePolicy> getPoliciesForGroup(Context c, Group g) throws SQLException;
@@ -338,7 +338,7 @@ public interface AuthorizeService {
* @param context current context * @param context current context
* @param dso object to remove policies from * @param dso object to remove policies from
* @param actionID ID of action to match from * @param actionID ID of action to match from
* <code>org.dspace.core.Constants</code>, or -1=all * {@link org.dspace.core.Constants#Constants Constants}, or -1=all
* @throws SQLException if there's a database problem * @throws SQLException if there's a database problem
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
*/ */
@@ -384,8 +384,8 @@ public interface AuthorizeService {
* *
* @param c current context * @param c current context
* @param o object * @param o object
* @param actionID ID of action from <code>org.dspace.core.Constants</code> * @param actionID ID of action from {@link org.dspace.core.Constants#Constants Constants}
* @return array of <code>Group</code>s that can perform the specified * @return array of {@link org.dspace.eperson.Group#Group Groups} that can perform the specified
* action on the specified object * action on the specified object
* @throws SQLException if there's a database problem * @throws SQLException if there's a database problem
*/ */
@@ -400,7 +400,7 @@ public interface AuthorizeService {
* @param c current context * @param c current context
* @param o object * @param o object
* @param group group * @param group group
* @param actionID ID of action from <code>org.dspace.core.Constants</code> * @param actionID ID of action from {@link org.dspace.core.Constants#Constants Constants}
* @param policyID ID of an existing policy. If -1 is specified, this parameter will be ignored * @param policyID ID of an existing policy. If -1 is specified, this parameter will be ignored
* @return true if such a policy exists, false otherwise * @return true if such a policy exists, false otherwise
* @throws SQLException if there's a database problem * @throws SQLException if there's a database problem

View File

@@ -11,8 +11,10 @@ import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import org.apache.commons.lang.ArrayUtils;
import org.dspace.core.ConfigurationManager; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/** /**
* Class to mediate with the item list configuration * Class to mediate with the item list configuration
@@ -33,6 +35,9 @@ public class ItemListConfig
/** constant for a TEXT column */ /** constant for a TEXT column */
private static final int TEXT = 2; private static final int TEXT = 2;
private final transient ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
/** /**
* Create a new instance of the Item list configuration. This loads * Create a new instance of the Item list configuration. This loads
@@ -45,21 +50,19 @@ public class ItemListConfig
{ {
try try
{ {
String configLine = ConfigurationManager.getProperty("webui.itemlist.columns"); String[] browseFields = configurationService.getArrayProperty("webui.itemlist.columns");
if (configLine == null || "".equals(configLine)) if (ArrayUtils.isEmpty(browseFields))
{ {
throw new BrowseException("There is no configuration for webui.itemlist.columns"); throw new BrowseException("There is no configuration for webui.itemlist.columns");
} }
// parse the config // parse the config
StringTokenizer st = new StringTokenizer(configLine, ",");
int i = 1; int i = 1;
while (st.hasMoreTokens()) for(String token : browseFields)
{ {
Integer key = Integer.valueOf(i); Integer key = Integer.valueOf(i);
String token = st.nextToken();
// find out if the field is a date // find out if the field is a date
if (token.indexOf("(date)") > 0) if (token.indexOf("(date)") > 0)
{ {
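webui.itemlist.columns is now read as an array through the ConfigurationService, which also takes over the comma splitting previously done with StringTokenizer. Minimal sketch of the lookup, mirroring the new code above:

ConfigurationService configurationService =
        DSpaceServicesFactory.getInstance().getConfigurationService();
String[] browseFields = configurationService.getArrayProperty("webui.itemlist.columns");
if (ArrayUtils.isEmpty(browseFields)) {
    throw new BrowseException("There is no configuration for webui.itemlist.columns");
}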

View File

@@ -37,7 +37,7 @@
<p>Dispatchers that generate bitstream ordering: - <p>Dispatchers that generate bitstream ordering: -
<ul> <ul>
<li>{@link org.dspace.checker.ListDispatcher}</li> <li>{@link org.dspace.checker.IteratorDispatcher}</li>
<li>{@link org.dspace.checker.SimpleDispatcher}</li> <li>{@link org.dspace.checker.SimpleDispatcher}</li>
</ul> </ul>
</p> </p>

View File

@@ -306,7 +306,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
collection = item.getOwningCollection(); collection = item.getOwningCollection();
if (collection != null) if (collection != null)
{ {
community = collection.getCommunities().iterator().next(); community = collection.getCommunities().get(0);
} }
} }
switch (action) switch (action)

View File

@@ -123,14 +123,23 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
@Override @Override
public List<Collection> findAll(Context context) throws SQLException { public List<Collection> findAll(Context context) throws SQLException {
MetadataField nameField = metadataFieldService.findByElement(context, "dc", "title", null); MetadataField nameField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null);
return collectionDAO.findAll(context, nameField); if(nameField==null)
{
throw new IllegalArgumentException("Required metadata field '" + MetadataSchema.DC_SCHEMA + ".title' doesn't exist!");
}
return collectionDAO.findAll(context, nameField);
} }
@Override @Override
public List<Collection> findAll(Context context, Integer limit, Integer offset) throws SQLException { public List<Collection> findAll(Context context, Integer limit, Integer offset) throws SQLException {
MetadataField nameField = metadataFieldService.findByElement(context, "dc", "title", null); MetadataField nameField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null);
if(nameField==null)
{
throw new IllegalArgumentException("Required metadata field '" + MetadataSchema.DC_SCHEMA + ".title' doesn't exist!");
}
return collectionDAO.findAll(context, nameField, limit, offset); return collectionDAO.findAll(context, nameField, limit, offset);
} }
@@ -619,7 +628,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
@Override @Override
public void canEdit(Context context, Collection collection, boolean useInheritance) throws SQLException, AuthorizeException { public void canEdit(Context context, Collection collection, boolean useInheritance) throws SQLException, AuthorizeException {
List<Community> parents = collection.getCommunities(); List<Community> parents = communityService.getAllParents(context, collection);
for (Community parent : parents) { for (Community parent : parents) {
if (authorizeService.authorizeActionBoolean(context, parent, if (authorizeService.authorizeActionBoolean(context, parent,
Constants.WRITE, useInheritance)) { Constants.WRITE, useInheritance)) {
@@ -794,7 +803,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
List<Community> communities = collection.getCommunities(); List<Community> communities = collection.getCommunities();
if (CollectionUtils.isNotEmpty(communities)) if (CollectionUtils.isNotEmpty(communities))
{ {
community = communities.iterator().next(); community = communities.get(0);
} }
switch (action) switch (action)
@@ -827,7 +836,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
public DSpaceObject getParentObject(Context context, Collection collection) throws SQLException { public DSpaceObject getParentObject(Context context, Collection collection) throws SQLException {
List<Community> communities = collection.getCommunities(); List<Community> communities = collection.getCommunities();
if(CollectionUtils.isNotEmpty(communities)){ if(CollectionUtils.isNotEmpty(communities)){
return communities.iterator().next(); return communities.get(0);
}else{ }else{
return null; return null;
} }

View File

@@ -137,12 +137,22 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
@Override @Override
public List<Community> findAll(Context context) throws SQLException { public List<Community> findAll(Context context) throws SQLException {
MetadataField sortField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null); MetadataField sortField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null);
if(sortField==null)
{
throw new IllegalArgumentException("Required metadata field '" + MetadataSchema.DC_SCHEMA + ".title' doesn't exist!");
}
return communityDAO.findAll(context, sortField); return communityDAO.findAll(context, sortField);
} }
@Override @Override
public List<Community> findAll(Context context, Integer limit, Integer offset) throws SQLException { public List<Community> findAll(Context context, Integer limit, Integer offset) throws SQLException {
MetadataField nameField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null); MetadataField nameField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null);
if(nameField==null)
{
throw new IllegalArgumentException("Required metadata field '" + MetadataSchema.DC_SCHEMA + ".title' doesn't exist!");
}
return communityDAO.findAll(context, nameField, limit, offset); return communityDAO.findAll(context, nameField, limit, offset);
} }
@@ -151,6 +161,11 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
{ {
// get all communities that are not children // get all communities that are not children
MetadataField sortField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null); MetadataField sortField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "title", null);
if(sortField==null)
{
throw new IllegalArgumentException("Required metadata field '" + MetadataSchema.DC_SCHEMA + ".title' doesn't exist!");
}
return communityDAO.findAllNoParent(context, sortField); return communityDAO.findAllNoParent(context, sortField);
} }
@@ -308,6 +323,17 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
return parentList; return parentList;
} }
@Override
public List<Community> getAllParents(Context context, Collection collection) throws SQLException {
List<Community> result = new ArrayList<>();
List<Community> communities = collection.getCommunities();
result.addAll(communities);
for (Community community : communities) {
result.addAll(getAllParents(context, community));
}
return result;
}
@Override @Override
public List<Collection> getAllCollections(Context context, Community community) throws SQLException { public List<Collection> getAllCollections(Context context, Community community) throws SQLException {
List<Collection> collectionList = new ArrayList<Collection>(); List<Collection> collectionList = new ArrayList<Collection>();
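The new CommunityService.getAllParents(Context, Collection) gathers a collection's direct owning communities plus all of their ancestors, replacing the getCommunities()/getAllParents(community) loops used elsewhere (see the ItemServiceImpl and CollectionDropDown hunks). A short usage sketch:

CommunityService communityService =
        ContentServiceFactory.getInstance().getCommunityService();
List<Community> ancestors = communityService.getAllParents(context, collection);
for (Community community : ancestors) {
    // e.g. build a breadcrumb trail or check authorization on each ancestor
}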

View File

@@ -207,7 +207,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, List<String> values) throws SQLException { public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, List<String> values) throws SQLException {
MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier); MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier);
if (metadataField == null) { if (metadataField == null) {
throw new SQLException("bad_dublin_core schema=" + schema + "." + element + "." + qualifier); throw new SQLException("bad_dublin_core schema=" + schema + "." + element + "." + qualifier + ". Metadata field does not exist!");
} }
addMetadata(context, dso, metadataField, lang, values); addMetadata(context, dso, metadataField, lang, values);
@@ -219,7 +219,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
// until update() is called. // until update() is called.
MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier); MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier);
if (metadataField == null) { if (metadataField == null) {
throw new SQLException("bad_dublin_core schema=" + schema + "." + element + "." + qualifier); throw new SQLException("bad_dublin_core schema=" + schema + "." + element + "." + qualifier + ". Metadata field does not exist!");
} }
addMetadata(context, dso, metadataField, lang, values, authorities, confidences); addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
} }
@@ -332,16 +332,15 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
@Override @Override
public void clearMetadata(Context context, T dso, String schema, String element, String qualifier, String lang) throws SQLException { public void clearMetadata(Context context, T dso, String schema, String element, String qualifier, String lang) throws SQLException {
// We will build a list of values NOT matching the values to clear
Iterator<MetadataValue> metadata = dso.getMetadata().iterator(); Iterator<MetadataValue> metadata = dso.getMetadata().iterator();
while (metadata.hasNext()) while (metadata.hasNext())
{ {
MetadataValue metadataValue = metadata.next(); MetadataValue metadataValue = metadata.next();
// If this value matches, delete it
if (match(schema, element, qualifier, lang, metadataValue)) if (match(schema, element, qualifier, lang, metadataValue))
{ {
metadataValue.setDSpaceObject(null);
metadata.remove(); metadata.remove();
// metadataValueService.delete(context, metadataValue); metadataValueService.delete(context, metadataValue);
} }
} }
dso.setMetadataModified(); dso.setMetadataModified();
@@ -355,7 +354,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
if(values.contains(metadataValue)) if(values.contains(metadataValue))
{ {
metadata.remove(); metadata.remove();
// metadataValueService.delete(context, metadataValue); metadataValueService.delete(context, metadataValue);
} }
} }
dso.setMetadataModified(); dso.setMetadataModified();
@@ -364,11 +363,17 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
/** /**
* Retrieve first metadata field value * Retrieve first metadata field value
* @param dso * @param dso
* @param language * The DSpaceObject which we ask for metadata.
* @param element
* @param schema * @param schema
* the schema for the metadata field. <em>Must</em> match
* the <code>name</code> of an existing metadata schema.
* @param element
* the element to match, or <code>Item.ANY</code>
* @param qualifier * @param qualifier
* @return * the qualifier to match, or <code>Item.ANY</code>
* @param language
* the language to match, or <code>Item.ANY</code>
* @return the first metadata field value
*/ */
@Override @Override
public String getMetadataFirstValue(T dso, String schema, String element, String qualifier, String language){ public String getMetadataFirstValue(T dso, String schema, String element, String qualifier, String language){
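clearMetadata() and removeMetadataValues() now delete the matching MetadataValue rows via metadataValueService.delete() instead of only detaching them in memory (the delete call was previously commented out). Callers are unaffected; a typical "replace all titles" sequence still looks like this hedged sketch (itemService assumed obtained from ContentServiceFactory):

itemService.clearMetadata(context, item, MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
itemService.addMetadata(context, item, MetadataSchema.DC_SCHEMA, "title", null, null, "New title");
itemService.update(context, item);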

View File

@@ -225,7 +225,7 @@ public class ItemComparator implements Comparator, Serializable
/** /**
* Normalize the title of a Metadatum. * Normalize the title of a Metadatum.
* @param value * @param value
* @return * @return normalized title
*/ */
protected String normalizeTitle(MetadataValue value) protected String normalizeTitle(MetadataValue value)
{ {

View File

@@ -206,6 +206,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
public Iterator<Item> findBySubmitterDateSorted(Context context, EPerson eperson, Integer limit) throws SQLException { public Iterator<Item> findBySubmitterDateSorted(Context context, EPerson eperson, Integer limit) throws SQLException {
MetadataField metadataField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "date", "accessioned"); MetadataField metadataField = metadataFieldService.findByElement(context, MetadataSchema.DC_SCHEMA, "date", "accessioned");
if(metadataField==null)
{
throw new IllegalArgumentException("Required metadata field '" + MetadataSchema.DC_SCHEMA + ".date.accessioned' doesn't exist!");
}
return itemDAO.findBySubmitter(context, eperson, metadataField, limit); return itemDAO.findBySubmitter(context, eperson, metadataField, limit);
} }
@@ -250,11 +255,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
List<Community> result = new ArrayList<>(); List<Community> result = new ArrayList<>();
List<Collection> collections = item.getCollections(); List<Collection> collections = item.getCollections();
for (Collection collection : collections) { for (Collection collection : collections) {
List<Community> owningCommunities = collection.getCommunities(); result.addAll(communityService.getAllParents(context, collection));
for (Community community : owningCommunities) {
result.add(community);
result.addAll(communityService.getAllParents(context, community));
}
} }
return result; return result;

View File

@@ -20,6 +20,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import org.apache.commons.collections.CollectionUtils;
/** /**
* Service implementation for the MetadataField object. * Service implementation for the MetadataField object.
@@ -140,11 +141,29 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
"Only administrators may modify the metadata registry"); "Only administrators may modify the metadata registry");
} }
// Check for existing usages of this field
List<MetadataValue> values = null;
try
{
values = metadataValueService.findByField(context, metadataField);
}
catch(IOException io)
{
// ignore
}
// Only remove this field if it is NOT in use (as we don't want to bulk delete metadata values)
if(CollectionUtils.isEmpty(values))
{
metadataFieldDAO.delete(context, metadataField);
}
else
{
throw new IllegalStateException("Metadata field " + metadataField.toString() + " cannot be deleted as it is currently used by one or more objects.");
}
log.info(LogManager.getHeader(context, "delete_metadata_field", log.info(LogManager.getHeader(context, "delete_metadata_field",
"metadata_field_id=" + metadataField.getID())); "metadata_field_id=" + metadataField.getID()));
metadataValueService.deleteByMetadataField(context, metadataField);
metadataFieldDAO.delete(context, metadataField);
} }
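delete() now refuses to remove a metadata field that still has values attached, instead of bulk-deleting those values as before. Callers should be prepared for an IllegalStateException when the field is in use; a hedged sketch:

try {
    metadataFieldService.delete(context, metadataField);
} catch (IllegalStateException e) {
    // field is still referenced by one or more metadata values; remove those first
    log.warn("Cannot delete " + metadataField + ": " + e.getMessage());
}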
/** /**

View File

@@ -475,179 +475,136 @@ public class METSRightsCrosswalk
{ {
//get what class of context this is //get what class of context this is
String contextClass = element.getAttributeValue("CONTEXTCLASS"); String contextClass = element.getAttributeValue("CONTEXTCLASS");
if ((element.getAttributeValue("start-date") != null) ResourcePolicy rp = resourcePolicyService.create(context);
|| (element.getAttributeValue("end-date") != null) SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
|| (element.getAttributeValue("rpName") != null))
{ // get reference to the <Permissions> element
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" ); // Note: we are assuming here that there will only ever be ONE <Permissions>
try { // element. Currently there are no known use cases for multiple.
ResourcePolicy rp = resourcePolicyService.create(context); Element permsElement = element.getChild("Permissions", METSRights_NS);
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("GENERAL PUBLIC")) { if(permsElement == null) {
Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); log.error("No <Permissions> element was found. Skipping this <Context> element.");
rp.setGroup(anonGroup); continue;
}
else
{
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("REPOSITORY MGR")) {
Group adminGroup = groupService.findByName(context, Group.ADMIN);
rp.setGroup(adminGroup);
}
}
if (element.getAttributeValue("rpName") != null)
{
rp.setRpName(element.getAttributeValue("rpName"));
}
try {
if (element.getAttributeValue("start-date") != null)
{
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
}
if (element.getAttributeValue("end-date") != null)
{
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
}
}catch (ParseException ex) {
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
}
List<Element> le = new ArrayList<Element>(element.getChildren());
for (Element el : le)
{
if ((el.getAttributeValue("DISCOVER").equalsIgnoreCase("true"))
&& (el.getAttributeValue("DISPLAY").equalsIgnoreCase("true")))
{
if (el.getAttributeValue("DELETE").equalsIgnoreCase("false"))
{
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("false"))
{
rp.setAction(Constants.READ);
}
else
{
rp.setAction(Constants.WRITE);
}
}
else
{
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("true"))
{
rp.setAction(Constants.DELETE);
if ((el.getAttributeValue("COPY").equalsIgnoreCase("true"))
&&(el.getAttributeValue("DUPLICATE").equalsIgnoreCase("true"))
&&(el.getAttributeValue("PRINT").equalsIgnoreCase("true")))
{
rp.setAction(Constants.ADMIN);
}
}
}
}
}
policies.add(rp);
} catch (NullPointerException ex) {
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
}
assignPermissions(context, dso, policies);
} }
else
if (element.getAttributeValue("rpName") != null)
{ {
//also get reference to the <Permissions> element rp.setRpName(element.getAttributeValue("rpName"));
Element permsElement = element.getChild("Permissions", METSRights_NS); }
try {
//Check if this permission pertains to Anonymous users if (element.getAttributeValue("start-date") != null)
if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
{ {
//get DSpace Anonymous group, ID=0 rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); }
if(anonGroup==null) if (element.getAttributeValue("end-date") != null)
{
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
}
assignPermissions(context, dso, anonGroup, permsElement);
} // else if this permission declaration pertains to Administrators
else if(ADMIN_CONTEXTCLASS.equals(contextClass))
{ {
//get DSpace Administrator group, ID=1 rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
Group adminGroup = groupService.findByName(context, Group.ADMIN); }
if(adminGroup==null) }catch (ParseException ex) {
{ log.error("Failed to parse embargo date. The date needs to be in the format 'yyyy-MM-dd'.", ex);
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database."); }
}
//Check if this permission pertains to Anonymous users
assignPermissions(context, dso, adminGroup, permsElement); if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
} // else if this permission pertains to another DSpace group {
else if(GROUP_CONTEXTCLASS.equals(contextClass)) //get DSpace Anonymous group, ID=0
Group anonGroup = groupService.findByName(context, Group.ANONYMOUS);
if(anonGroup==null)
{ {
try throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
{ }
//we need to find the name of DSpace group it pertains to
//Get the text within the <UserName> child element,
// this is the group's name
String groupName = element.getChildTextTrim("UserName", METSRights_NS);
//Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN) rp.setGroup(anonGroup);
// from its external format (e.g. COLLECTION_<handle>_ADMIN) } // else if this permission declaration pertains to Administrators
groupName = PackageUtils.translateGroupNameForImport(context, groupName); else if(ADMIN_CONTEXTCLASS.equals(contextClass))
{
//Check if this group exists in DSpace already //get DSpace Administrator group, ID=1
Group group = groupService.findByName(context, groupName); Group adminGroup = groupService.findByName(context, Group.ADMIN);
if(adminGroup==null)
//if not found, throw an error -- user should restore group from the SITE AIP
if(group==null)
{
throw new CrosswalkInternalException("Cannot restore Group permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ "). The Group named '" + groupName + "' is missing from DSpace. "
+ "Please restore this group using the SITE AIP, or recreate it.");
}
//assign permissions to group on this object
assignPermissions(context, dso, group, permsElement);
}
catch(PackageException pe)
{
//A PackageException will only be thrown if translateDefaultGroupName() fails
//We'll just wrap it as a CrosswalkException and throw it upwards
throw new CrosswalkException(pe);
}
}//end if Group
else if(PERSON_CONTEXTCLASS.equals(contextClass))
{ {
//we need to find the person it pertains to throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
// Get the text within the <UserName> child element, }
// this is the person's email address
String personEmail = element.getChildTextTrim("UserName", METSRights_NS);
//Check if this person exists in DSpace already rp.setGroup(adminGroup);
EPerson person = ePersonService.findByEmail(context, personEmail); } // else if this permission pertains to another DSpace group
else if(GROUP_CONTEXTCLASS.equals(contextClass))
{
try
{
//we need to find the name of DSpace group it pertains to
//Get the text within the <UserName> child element,
// this is the group's name
String groupName = element.getChildTextTrim("UserName", METSRights_NS);
//If cannot find by email, try by netID //Translate Group name back to internal ID format (e.g. COLLECTION_<ID>_ADMIN)
//(though METSRights should contain email if it was exported by DSpace) // from its external format (e.g. COLLECTION_<handle>_ADMIN)
if(person==null) groupName = PackageUtils.translateGroupNameForImport(context, groupName);
//Check if this group exists in DSpace already
Group group = groupService.findByName(context, groupName);
//if not found, throw an error -- user should restore group from the SITE AIP
if(group==null)
{ {
person = ePersonService.findByNetid(context, personEmail); throw new CrosswalkInternalException("Cannot restore Group permissions on object ("
}
//if not found, throw an error -- user should restore person from the SITE AIP
if(person==null)
{
throw new CrosswalkInternalException("Cannot restore Person permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", " + "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", " + "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID() + "ID=" + dso.getID()
+ "). The Person with email/netid '" + personEmail + "' is missing from DSpace. " + "). The Group named '" + groupName + "' is missing from DSpace. "
+ "Please restore this Person object using the SITE AIP, or recreate it."); + "Please restore this group using the SITE AIP, or recreate it.");
} }
//assign permissions to person on this object //assign group to policy
assignPermissions(context, dso, person, permsElement); rp.setGroup(group);
}//end if Person }
else catch(PackageException pe)
log.error("Unrecognized CONTEXTCLASS: " + contextClass); {
//A PackageException will only be thrown if translateDefaultGroupName() fails
//We'll just wrap it as a CrosswalkException and throw it upwards
throw new CrosswalkException(pe);
}
}// else if this permission pertains to a DSpace person
else if(PERSON_CONTEXTCLASS.equals(contextClass))
{
//we need to find the person it pertains to
// Get the text within the <UserName> child element,
// this is the person's email address
String personEmail = element.getChildTextTrim("UserName", METSRights_NS);
//Check if this person exists in DSpace already
EPerson person = ePersonService.findByEmail(context, personEmail);
//If cannot find by email, try by netID
//(though METSRights should contain email if it was exported by DSpace)
if(person==null)
{
person = ePersonService.findByNetid(context, personEmail);
}
//if not found, throw an error -- user should restore person from the SITE AIP
if(person==null)
{
throw new CrosswalkInternalException("Cannot restore Person permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ "). The Person with email/netid '" + personEmail + "' is missing from DSpace. "
+ "Please restore this Person object using the SITE AIP, or recreate it.");
}
//assign person to the policy
rp.setEPerson(person);
}//end if Person
else {
log.error("Unrecognized CONTEXTCLASS: " + contextClass);
} }
//set permissions on policy and add to object
rp.setAction(parsePermissions(permsElement));
policies.add(rp);
assignPermissions(context, dso, policies);
} //end if "Context" element } //end if "Context" element
}//end while loop }//end while loop
} }
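The rewritten <Context> handling builds one ResourcePolicy per context element regardless of which attributes are present: optional rpName/start-date/end-date values are copied onto the policy, the grantee (anonymous group, administrators, a named group, or an EPerson) is resolved from CONTEXTCLASS, the action comes from the single <Permissions> child via parsePermissions(), and the accumulated policies are applied with assignPermissions(). A condensed sketch of that flow, using only names visible in the hunk (error handling and the group/admin branches omitted):

ResourcePolicy rp = resourcePolicyService.create(context);
if (element.getAttributeValue("rpName") != null) {
    rp.setRpName(element.getAttributeValue("rpName"));
}
// start-date / end-date parsed with the "yyyy-MM-dd" format as above
if (ANONYMOUS_CONTEXTCLASS.equals(contextClass)) {
    rp.setGroup(groupService.findByName(context, Group.ANONYMOUS));
} else if (PERSON_CONTEXTCLASS.equals(contextClass)) {
    rp.setEPerson(ePersonService.findByEmail(context, personEmail));
}
rp.setAction(parsePermissions(permsElement));
policies.add(rp);
assignPermissions(context, dso, policies);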

View File

@@ -1194,9 +1194,9 @@ public class METSManifest
} }
/** /**
* Get an aray of all AMDID values for this object * Get an array of all AMDID values for this object
* *
* @return * @return array of all AMDID values for this object
* @throws MetadataValidationException if metadata validation error * @throws MetadataValidationException if metadata validation error
*/ */
protected String[] getAmdIDs() protected String[] getAmdIDs()

View File

@@ -14,6 +14,8 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.*; import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -815,12 +817,21 @@ public class PackageUtils
public static String translateGroupNameForExport(Context context, String groupName) public static String translateGroupNameForExport(Context context, String groupName)
throws PackageException throws PackageException
{ {
// See if this resembles a default Group name Pattern defaultGroupNamePattern = Pattern.compile("^([^_]+)_([^_]+)_(.+)$");
// Check if this looks like a default Group name
Matcher matcher = defaultGroupNamePattern.matcher(groupName);
if(!matcher.matches())
{
//if this is not a valid default group name, just return group name as-is (no crosswalking necessary)
return groupName;
}
String objID = StringUtils.substringBetween(groupName, "_", "_"); String objTypeText = matcher.group(1);
int objType = StringUtils.startsWith(groupName, "COLLECTION_") ? Constants.COLLECTION : (StringUtils.startsWith(groupName, "COMMUNITY_") ? Constants.COMMUNITY : -1); String objID = matcher.group(2);
String groupType = StringUtils.substringAfterLast(groupName, "_"); String groupType = matcher.group(3);
if (objID == null && objType != -1)
int objType = Constants.getTypeID(objTypeText);
if (objID == null || objType == -1)
return groupName; return groupName;
@@ -863,7 +874,7 @@ public class PackageUtils
//Create an updated group name, using the Handle to replace the InternalID //Create an updated group name, using the Handle to replace the InternalID
// Format: <DSpace-Obj-Type>_hdl:<Handle>_<Group-Type> // Format: <DSpace-Obj-Type>_hdl:<Handle>_<Group-Type>
return objType + "_" + "hdl:" + dso.getHandle() + "_" + groupType; return objTypeText + "_" + "hdl:" + dso.getHandle() + "_" + groupType;
} }
catch (SQLException sqle) catch (SQLException sqle)
{ {
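Group-name translation now parses the default name with a single regular expression instead of several StringUtils calls, so names that do not follow the TYPE_ID_ROLE pattern are returned untouched. Illustrative sketch of the matching step (the sample group name is hypothetical):

Pattern defaultGroupNamePattern = Pattern.compile("^([^_]+)_([^_]+)_(.+)$");
Matcher matcher = defaultGroupNamePattern.matcher("COLLECTION_123_ADMIN");
if (matcher.matches()) {
    String objTypeText = matcher.group(1);        // "COLLECTION"
    String objID = matcher.group(2);              // "123"
    String groupType = matcher.group(3);          // "ADMIN"
    int objType = Constants.getTypeID(objTypeText);
}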

View File

@@ -189,6 +189,13 @@ public interface CommunityService extends DSpaceObjectService<Community>, DSpace
*/ */
public List<Community> getAllParents(Context context, Community community) throws SQLException; public List<Community> getAllParents(Context context, Community community) throws SQLException;
/**
* Return a list of parent communities of this collection, including their ancestors.
*
* @return a list of parent communities
*/
public List<Community> getAllParents(Context context, Collection collection) throws SQLException;
/** /**
* Return an array of collections of this community and its subcommunities * Return an array of collections of this community and its subcommunities
* *

View File

@@ -95,11 +95,11 @@ public interface ItemService extends DSpaceObjectService<Item>, DSpaceObjectLega
throws SQLException; throws SQLException;
/** /**
* Retrieve the list of Items submitted by eperson, ordered by recently submitted, optionally limitable * Retrieve the list of items submitted by eperson, ordered by recently submitted, optionally limitable
* @param context context * @param context context
* @param eperson eperson * @param eperson eperson
* @param limit a positive integer to limit, -1 or null for unlimited * @param limit a positive integer to limit, -1 or null for unlimited
* @return * @return an iterator over the items submitted by eperson
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public Iterator<Item> findBySubmitterDateSorted(Context context, EPerson eperson, Integer limit) throws SQLException; public Iterator<Item> findBySubmitterDateSorted(Context context, EPerson eperson, Integer limit) throws SQLException;

View File

@@ -91,7 +91,7 @@ public interface MetadataValueService {
* *
* @param context * @param context
* @param metadataFieldId unique identifier of the interesting field. * @param metadataFieldId unique identifier of the interesting field.
* @return * @return the minimum value of the metadata field
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public MetadataValue getMinimum(Context context, int metadataFieldId) public MetadataValue getMinimum(Context context, int metadataFieldId)

View File

@@ -143,7 +143,7 @@ public interface WorkspaceItemService extends InProgressSubmissionService<Worksp
/** /**
* The map entry returned contains stage reached as the key and count of items in that stage as a value * The map entry returned contains stage reached as the key and count of items in that stage as a value
* @param context * @param context
* @return * @return the map
* @throws SQLException if database error * @throws SQLException if database error
*/ */
List<Map.Entry<Integer, Long>> getStageReachedCounts(Context context) throws SQLException; List<Map.Entry<Integer, Long>> getStageReachedCounts(Context context) throws SQLException;

View File

@@ -94,8 +94,8 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* Execute a JPA Criteria query and return a collection of results. * Execute a JPA Criteria query and return a collection of results.
* *
* @param context * @param context
* @param query * @param query JPQL query string
* @return * @return list of DAOs specified by the query string
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public List<T> findMany(Context context, Query query) throws SQLException { public List<T> findMany(Context context, Query query) throws SQLException {
@@ -135,7 +135,7 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* retrieved an exception will be thrown, * retrieved an exception will be thrown,
* so only use when the criteria state uniqueness in the database. * so only use when the criteria state uniqueness in the database.
* @param criteria * @param criteria
* @return * @return a DAO specified by the criteria
*/ */
public T uniqueResult(Criteria criteria) public T uniqueResult(Criteria criteria)
{ {
@@ -148,7 +148,7 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* Retrieve a single result from the query. Best used if you expect a * Retrieve a single result from the query. Best used if you expect a
* single result, but this isn't enforced on the database. * single result, but this isn't enforced on the database.
* @param criteria * @param criteria
* @return * @return a DAO specified by the criteria
*/ */
public T singleResult(Criteria criteria) public T singleResult(Criteria criteria)
{ {

View File

@@ -16,6 +16,7 @@ import org.dspace.event.Event;
import org.dspace.event.factory.EventServiceFactory; import org.dspace.event.factory.EventServiceFactory;
import org.dspace.event.service.EventService; import org.dspace.event.service.EventService;
import org.dspace.storage.rdbms.DatabaseConfigVO; import org.dspace.storage.rdbms.DatabaseConfigVO;
import org.dspace.storage.rdbms.DatabaseUtils;
import org.dspace.utils.DSpace; import org.dspace.utils.DSpace;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
@@ -83,6 +84,21 @@ public class Context
private DBConnection dbConnection; private DBConnection dbConnection;
static
{
// Before initializing a Context object, we need to ensure the database
// is up-to-date. This ensures any outstanding Flyway migrations are run
// PRIOR to Hibernate initializing (occurs when DBConnection is loaded in init() below).
try
{
DatabaseUtils.updateDatabase();
}
catch(SQLException sqle)
{
log.fatal("Cannot initialize database via Flyway!", sqle);
}
}
protected Context(EventService eventService, DBConnection dbConnection) { protected Context(EventService eventService, DBConnection dbConnection) {
this.eventService = eventService; this.eventService = eventService;
this.dbConnection = dbConnection; this.dbConnection = dbConnection;
@@ -93,9 +109,6 @@ public class Context
/** /**
* Construct a new context object with default options. A database connection is opened. * Construct a new context object with default options. A database connection is opened.
* No user is authenticated. * No user is authenticated.
*
* @exception SQLException
* if there was an error obtaining a database connection
*/ */
public Context() public Context()
{ {

View File

@@ -34,7 +34,7 @@ public interface GenericDAO<T>
* *
* @param context * @param context
* @param clazz the desired type. * @param clazz the desired type.
* @return * @return list of DAOs of the same type as clazz
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public List<T> findAll(Context context, Class<T> clazz) throws SQLException; public List<T> findAll(Context context, Class<T> clazz) throws SQLException;
@@ -43,8 +43,8 @@ public interface GenericDAO<T>
* Execute a JPQL query returning a unique result. * Execute a JPQL query returning a unique result.
* *
* @param context * @param context
* @param query * @param query JPQL query string
* @return * @return a DAO specified by the query string
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public T findUnique(Context context, String query) throws SQLException; public T findUnique(Context context, String query) throws SQLException;
@@ -57,8 +57,8 @@ public interface GenericDAO<T>
* Execute a JPQL query and return a collection of results. * Execute a JPQL query and return a collection of results.
* *
* @param context * @param context
* @param query * @param query JPQL query string
* @return * @return list of DAOs specified by the query string
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public List<T> findMany(Context context, String query) throws SQLException; public List<T> findMany(Context context, String query) throws SQLException;

View File

@@ -58,7 +58,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* {@code My descriptor $td=ruby|rubytask.rb|LinkChecker.new} * {@code My descriptor $td=ruby|rubytask.rb|LinkChecker.new}
* *
* For portability, the {@code <relFilePath>} component may be omitted in this context. * For portability, the {@code <relFilePath>} component may be omitted in this context.
* Thus, {@code$td=ruby||LinkChecker.new} will be expanded to a descriptor * Thus, {@code $td=ruby||LinkChecker.new} will be expanded to a descriptor
* with the name of the embedding file. * with the name of the embedding file.
* *
* @author richardrodgers * @author richardrodgers

View File

@@ -677,10 +677,10 @@ public class SolrServiceImpl implements SearchService, IndexingService {
return locations; return locations;
} }
protected List<String> getCollectionLocations(Collection target) throws SQLException { protected List<String> getCollectionLocations(Context context, Collection target) throws SQLException {
List<String> locations = new Vector<String>(); List<String> locations = new Vector<String>();
// build list of community ids // build list of community ids
List<Community> communities = target.getCommunities(); List<Community> communities = communityService.getAllParents(context, target);
// now put those into strings // now put those into strings
for (Community community : communities) for (Community community : communities)
@@ -798,7 +798,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
*/ */
protected void buildDocument(Context context, Collection collection) protected void buildDocument(Context context, Collection collection)
throws SQLException, IOException { throws SQLException, IOException {
List<String> locations = getCollectionLocations(collection); List<String> locations = getCollectionLocations(context, collection);
// Create Lucene Document // Create Lucene Document
SolrInputDocument doc = buildDocument(Constants.COLLECTION, collection.getID(), SolrInputDocument doc = buildDocument(Constants.COLLECTION, collection.getID(),
@@ -1524,8 +1524,12 @@ public class SolrServiceImpl implements SearchService, IndexingService {
public String locationToName(Context context, String field, String value) throws SQLException { public String locationToName(Context context, String field, String value) throws SQLException {
if("location.comm".equals(field) || "location.coll".equals(field)) if("location.comm".equals(field) || "location.coll".equals(field))
{ {
int type = field.equals("location.comm") ? Constants.COMMUNITY : Constants.COLLECTION; int type = ("location.comm").equals(field) ? Constants.COMMUNITY : Constants.COLLECTION;
DSpaceObject commColl = contentServiceFactory.getDSpaceObjectService(type).find(context, UUID.fromString(value)); DSpaceObject commColl = null;
if (StringUtils.isNotBlank(value))
{
commColl = contentServiceFactory.getDSpaceObjectService(type).find(context, UUID.fromString(value));
}
if(commColl != null) if(commColl != null)
{ {
return commColl.getName(); return commColl.getName();
@@ -2017,7 +2021,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
DiscoverFilterQuery result = new DiscoverFilterQuery(); DiscoverFilterQuery result = new DiscoverFilterQuery();
StringBuilder filterQuery = new StringBuilder(); StringBuilder filterQuery = new StringBuilder();
if(StringUtils.isNotBlank(field)) if(StringUtils.isNotBlank(field) && StringUtils.isNotBlank(value))
{ {
filterQuery.append(field); filterQuery.append(field);
if("equals".equals(operator)) if("equals".equals(operator))
@@ -2069,10 +2073,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
} }
result.setDisplayedValue(transformDisplayedValue(context, field, value));
} }
result.setDisplayedValue(transformDisplayedValue(context, field, value));
result.setFilterQuery(filterQuery.toString()); result.setFilterQuery(filterQuery.toString());
return result; return result;
} }

View File

@@ -69,7 +69,12 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu
if(!authorizeService.isAdmin(context)){ if(!authorizeService.isAdmin(context)){
StringBuilder resourceQuery = new StringBuilder(); StringBuilder resourceQuery = new StringBuilder();
//Always add the anonymous group id to the query //Always add the anonymous group id to the query
resourceQuery.append("read:(g0"); Group anonymousGroup = groupService.findByName(context,Group.ANONYMOUS);
String anonGroupId = "";
if(anonymousGroup!=null){
anonGroupId = anonymousGroup.getID().toString();
}
resourceQuery.append("read:(g"+anonGroupId);
EPerson currentUser = context.getCurrentUser(); EPerson currentUser = context.getCurrentUser();
if(currentUser != null){ if(currentUser != null){
resourceQuery.append(" OR e").append(currentUser.getID()); resourceQuery.append(" OR e").append(currentUser.getID());

View File

@@ -37,6 +37,9 @@ public class Handle implements ReloadableEntity<Integer> {
@JoinColumn(name = "resource_id") @JoinColumn(name = "resource_id")
private DSpaceObject dso; private DSpaceObject dso;
/**
* The resource type of the DSO; see {@link org.dspace.core.Constants Constants}.
*/
@Column(name = "resource_type_id") @Column(name = "resource_type_id")
private Integer resourceTypeId; private Integer resourceTypeId;
@@ -74,10 +77,16 @@ public class Handle implements ReloadableEntity<Integer> {
return dso; return dso;
} }
/**
* @param resourceTypeId the integer constant of the DSO, see {@link org.dspace.core.Constants#Constants Constants}
*/
public void setResourceTypeId(Integer resourceTypeId) { public void setResourceTypeId(Integer resourceTypeId) {
this.resourceTypeId = resourceTypeId; this.resourceTypeId = resourceTypeId;
} }
/**
* @return the integer constant of the DSO, see {@link org.dspace.core.Constants#Constants Constants}
*/
public Integer getResourceTypeId() { public Integer getResourceTypeId() {
return resourceTypeId; return resourceTypeId;
} }

View File

@@ -128,7 +128,7 @@ public class ItemCheck extends Check {
return sb.toString(); return sb.toString();
} }
public String getCollectionSizesInfo(Context context) throws SQLException { public String getCollectionSizesInfo(final Context context) throws SQLException {
final StringBuffer ret = new StringBuffer(); final StringBuffer ret = new StringBuffer();
List<Map.Entry<Collection, Long>> colBitSizes = collectionService.getCollectionsWithBitstreamSizesTotal(context); List<Map.Entry<Collection, Long>> colBitSizes = collectionService.getCollectionsWithBitstreamSizesTotal(context);
long total_size = 0; long total_size = 0;
@@ -137,8 +137,8 @@ public class ItemCheck extends Check {
@Override @Override
public int compare(Map.Entry<Collection, Long> o1, Map.Entry<Collection, Long> o2) { public int compare(Map.Entry<Collection, Long> o1, Map.Entry<Collection, Long> o2) {
try { try {
return CollectionDropDown.collectionPath(o1.getKey()).compareTo( return CollectionDropDown.collectionPath(context, o1.getKey()).compareTo(
CollectionDropDown.collectionPath(o2.getKey()) CollectionDropDown.collectionPath(context, o2.getKey())
); );
} catch (Exception e) { } catch (Exception e) {
ret.append(e.getMessage()); ret.append(e.getMessage());
@@ -151,7 +151,7 @@ public class ItemCheck extends Check {
total_size += size; total_size += size;
Collection col = row.getKey(); Collection col = row.getKey();
ret.append(String.format( ret.append(String.format(
"\t%s: %s\n", CollectionDropDown.collectionPath(col), FileUtils.byteCountToDisplaySize((long) size))); "\t%s: %s\n", CollectionDropDown.collectionPath(context, col), FileUtils.byteCountToDisplaySize((long) size)));
} }
ret.append(String.format( ret.append(String.format(
"Total size: %s\n", FileUtils.byteCountToDisplaySize(total_size))); "Total size: %s\n", FileUtils.byteCountToDisplaySize(total_size)));

View File

@@ -88,7 +88,7 @@ public class DOI
* returns the resource type of the DSpaceObject the DOI is or was assigned * returns the resource type of the DSpaceObject the DOI is or was assigned
* to. The resource type is set automatically when a DOI is assigned to a * to. The resource type is set automatically when a DOI is assigned to a
* DSpaceObject, using {@link #setDSpaceObject(org.dspace.content.DSpaceObject) }. * DSpaceObject, using {@link #setDSpaceObject(org.dspace.content.DSpaceObject) }.
* @return * @return the integer constant of the DSO, see {@link org.dspace.core.Constants#Constants Constants}
*/ */
public Integer getResourceTypeId() { public Integer getResourceTypeId() {
return this.resourceTypeId; return this.resourceTypeId;

View File

@@ -144,7 +144,7 @@ public class DOIIdentifierProvider
* This identifier provider supports identifiers of type * This identifier provider supports identifiers of type
* {@link org.dspace.identifier.DOI}. * {@link org.dspace.identifier.DOI}.
* @param identifier to check if it will be supported by this provider. * @param identifier to check if it will be supported by this provider.
* @return * @return true if this provider supports the given identifier type
*/ */
@Override @Override
public boolean supports(Class<? extends Identifier> identifier) public boolean supports(Class<? extends Identifier> identifier)
@@ -160,7 +160,7 @@ public class DOIIdentifierProvider
* <li>http://dx.doi.org/10.123/456</li> * <li>http://dx.doi.org/10.123/456</li>
* </ul> * </ul>
* @param identifier to check if it is in a supported format. * @param identifier to check if it is in a supported format.
* @return * @return true if the identifier is in a format supported by this provider
*/ */
@Override @Override
public boolean supports(String identifier) public boolean supports(String identifier)
@@ -231,7 +231,7 @@ public class DOIIdentifierProvider
* @param dso DSpaceObject the DOI should be reserved for. Some metadata of * @param dso DSpaceObject the DOI should be reserved for. Some metadata of
* this object will be send to the registration agency. * this object will be send to the registration agency.
* @param identifier DOI to register in a format that * @param identifier DOI to register in a format that
* {@link FormatIdentifier(String)} accepts. * {@link org.dspace.identifier.service.DOIService#formatIdentifier(String)} accepts.
* @throws IdentifierException If the format of {@code identifier} was * @throws IdentifierException If the format of {@code identifier} was
* unrecognized or if it was impossible to * unrecognized or if it was impossible to
* reserve the DOI (registration agency denied * reserve the DOI (registration agency denied
@@ -239,7 +239,7 @@ public class DOIIdentifierProvider
* @throws IllegalArgumentException If {@code identifier} is a DOI already * @throws IllegalArgumentException If {@code identifier} is a DOI already
* registered for another DSpaceObject then * registered for another DSpaceObject then
* {@code dso}. * {@code dso}.
* @see IdentifierProvider.reserve(Context, DSpaceObject, String) * @see org.dspace.identifier.IdentifierProvider#reserve(Context, DSpaceObject, String)
*/ */
@Override @Override
public void reserve(Context context, DSpaceObject dso, String identifier) public void reserve(Context context, DSpaceObject dso, String identifier)
@@ -696,7 +696,7 @@ public class DOIIdentifierProvider
* Returns a DSpaceObject depending on its DOI. * Returns a DSpaceObject depending on its DOI.
* @param context the context * @param context the context
* @param identifier The DOI in a format that is accepted by * @param identifier The DOI in a format that is accepted by
* {@link formatIdentifier(String)}. * {@link org.dspace.identifier.service.DOIService#formatIdentifier(String)}.
* @return Null if the DOI couldn't be found or the associated DSpaceObject. * @return Null if the DOI couldn't be found or the associated DSpaceObject.
* @throws SQLException if database error * @throws SQLException if database error
* @throws IdentifierException If {@code identifier} is null or an empty string. * @throws IdentifierException If {@code identifier} is null or an empty string.

View File

@@ -54,8 +54,8 @@ public class DataCiteXMLCreator
/** /**
* DisseminationCrosswalk to map local metadata into DataCite metadata. The * DisseminationCrosswalk to map local metadata into DataCite metadata. The
* name of the crosswalk is set by {@link setDisseminationCrosswalk(String) * name of the crosswalk is set by {@link #setDisseminationCrosswalkName(String)
* setDisseminationCrosswalk} which instantiates the crosswalk. * setDisseminationCrosswalkName} which instantiates the crosswalk.
*/ */
protected ParameterizedDisseminationCrosswalk xwalk; protected ParameterizedDisseminationCrosswalk xwalk;

View File

@@ -17,9 +17,10 @@ import org.dspace.core.Context;
* A DOIConnector should care about rules of the registration agency. For * A DOIConnector should care about rules of the registration agency. For
* example, if the registration agency wants us to reserve a DOI before we can * example, if the registration agency wants us to reserve a DOI before we can
* register it, the DOIConnector should check if a DOI is reserved. Use a * register it, the DOIConnector should check if a DOI is reserved. Use a
* {@link DOIIdenfierException} and set its error code in case of any errors. * {@link org.dspace.identifier.doi.DOIIdentifierException#DOIIdentifierException DOIIdentifierException}
* and set its error code in case of any errors.
* For the given example you should use * For the given example you should use
* {@code DOIIdentifierException.RESERVER_FIRST} as error code. * {@code DOIIdentifierException.RESERVE_FIRST} as error code.
* *
* @author Pascal-Nicolas Becker * @author Pascal-Nicolas Becker
*/ */
@@ -49,7 +50,8 @@ public interface DOIConnector {
* Sends a request to the DOI registry to reserve a DOI. * Sends a request to the DOI registry to reserve a DOI.
* *
* The DOIConnector should check weather this DOI is reserved for another * The DOIConnector should check weather this DOI is reserved for another
* object already. In this case it should throw an {@link * object already. In this case it should throw a
* {@link org.dspace.identifier.doi.DOIIdentifierException#DOIIdentifierException
* DOIIdentifierException} and set the error code to {@code * DOIIdentifierException} and set the error code to {@code
* DOIIdentifierException.DOI_ALREADY_EXISTS}. * DOIIdentifierException.DOI_ALREADY_EXISTS}.
* *
@@ -66,8 +68,10 @@ public interface DOIConnector {
* The DOIConnector ensures compliance with the workflow of the registration * The DOIConnector ensures compliance with the workflow of the registration
* agency. For example, if a DOI has to be reserved before it can be * agency. For example, if a DOI has to be reserved before it can be
* registered the DOIConnector has to check if it is reserved. In this case * registered the DOIConnector has to check if it is reserved. In this case
* you can throw an DOIIdentifierExcpetion and set the error code to * you can throw a
* {@link DOIIdentifierException.RESERVE_FIRST}. * {@link org.dspace.identifier.doi.DOIIdentifierException#DOIIdentifierException DOIIdentifierException}
* and set the error code to
* {@code DOIIdentifierException.RESERVE_FIRST}.
* *
* @param context * @param context
* @param dso * @param dso

View File

@@ -103,7 +103,7 @@ implements DOIConnector
/** /**
* DisseminationCrosswalk to map local metadata into DataCite metadata. * DisseminationCrosswalk to map local metadata into DataCite metadata.
* The name of the crosswalk is set by spring dependency injection using * The name of the crosswalk is set by spring dependency injection using
* {@link setDisseminationCrosswalk(String) setDisseminationCrosswalk} which * {@link #setDisseminationCrosswalkName(String) setDisseminationCrosswalkName} which
* instantiates the crosswalk. * instantiates the crosswalk.
*/ */
protected ParameterizedDisseminationCrosswalk xwalk; protected ParameterizedDisseminationCrosswalk xwalk;
@@ -545,7 +545,7 @@ implements DOIConnector
// 412 Precondition failed: DOI was not reserved before registration! // 412 Precondition failed: DOI was not reserved before registration!
case (412) : case (412) :
{ {
log.error("We tried to register a DOI {} that was not reserved " log.error("We tried to register a DOI {} that has not been reserved "
+ "before! The registration agency told us: {}.", doi, + "before! The registration agency told us: {}.", doi,
resp.getContent()); resp.getContent());
throw new DOIIdentifierException("There was an error in handling " throw new DOIIdentifierException("There was an error in handling "
@@ -571,8 +571,8 @@ implements DOIConnector
public void updateMetadata(Context context, DSpaceObject dso, String doi) public void updateMetadata(Context context, DSpaceObject dso, String doi)
throws DOIIdentifierException throws DOIIdentifierException
{ {
// We can use reserveDOI to update metadata. Datacite API uses the same // We can use reserveDOI to update metadata. DataCite API uses the same
// request for reservartion as for updating metadata. // request for reservation as for updating metadata.
this.reserveDOI(context, dso, doi); this.reserveDOI(context, dso, doi);
} }
@@ -612,7 +612,7 @@ implements DOIConnector
} }
finally finally
{ {
// release ressources // release resources
try try
{ {
EntityUtils.consume(reqEntity); EntityUtils.consume(reqEntity);
@@ -731,7 +731,7 @@ implements DOIConnector
} }
finally finally
{ {
// release ressources // release resources
try try
{ {
EntityUtils.consume(reqEntity); EntityUtils.consume(reqEntity);
@@ -748,7 +748,7 @@ implements DOIConnector
* *
* @param req * @param req
* @param doi * @param doi
* @return * @return response from DataCite
* @throws DOIIdentifierException if DOI error * @throws DOIIdentifierException if DOI error
*/ */
protected DataCiteResponse sendHttpRequest(HttpUriRequest req, String doi) protected DataCiteResponse sendHttpRequest(HttpUriRequest req, String doi)
@@ -774,7 +774,7 @@ implements DOIConnector
content = EntityUtils.toString(entity, "UTF-8"); content = EntityUtils.toString(entity, "UTF-8");
} }
/* While debugging it can be useful to see whitch requests are send: /* While debugging it can be useful to see which requests are sent:
* *
* log.debug("Going to send HTTP request of type " + req.getMethod() + "."); * log.debug("Going to send HTTP request of type " + req.getMethod() + ".");
* log.debug("Will be send to " + req.getURI().toString() + "."); * log.debug("Will be send to " + req.getURI().toString() + ".");
@@ -861,7 +861,7 @@ implements DOIConnector
{ {
try try
{ {
// Release any ressources used by HTTP-Request. // Release any resources used by HTTP-Request.
if (null != entity) if (null != entity)
{ {
EntityUtils.consume(entity); EntityUtils.consume(entity);

View File

@@ -159,10 +159,10 @@ public class EZIDRequest
/** /**
* Create an identifier with a given name. The name is the end of the * Create an identifier with a given name. The name is the end of the
* request path. Note: to "reserve" a given identifier, include "_status = * request path. Note: to "reserve" a given identifier, include "_status =
* reserved" in {@link metadata}. * reserved" in {@code metadata}.
* *
* @param metadata ANVL-encoded key/value pairs. * @param metadata ANVL-encoded key/value pairs.
* @return * @return Decoded response data returned by EZID.
*/ */
public EZIDResponse create(String name, Map<String, String> metadata) public EZIDResponse create(String name, Map<String, String> metadata)
throws IOException, IdentifierException, URISyntaxException throws IOException, IdentifierException, URISyntaxException
@@ -182,10 +182,10 @@ public class EZIDRequest
/** /**
* Ask EZID to create a unique identifier and return its name. NOTE: to * Ask EZID to create a unique identifier and return its name. NOTE: to
* "reserve" a unique identifier, include "_status = reserved" in {@link metadata}. * "reserve" a unique identifier, include "_status = reserved" in {@code metadata}.
* *
* @param metadata ANVL-encoded key/value pairs. * @param metadata ANVL-encoded key/value pairs.
* @return * @return Decoded response data returned by EZID.
*/ */
public EZIDResponse mint(Map<String, String> metadata) public EZIDResponse mint(Map<String, String> metadata)
throws IOException, IdentifierException, URISyntaxException throws IOException, IdentifierException, URISyntaxException
@@ -209,7 +209,7 @@ public class EZIDRequest
* *
* @param metadata fields to be altered. Leave the value of a field's empty * @param metadata fields to be altered. Leave the value of a field's empty
* to delete the field. * to delete the field.
* @return * @return Decoded response data returned by EZID.
*/ */
public EZIDResponse modify(String name, Map<String, String> metadata) public EZIDResponse modify(String name, Map<String, String> metadata)
throws IOException, IdentifierException, URISyntaxException throws IOException, IdentifierException, URISyntaxException

View File

@@ -48,7 +48,7 @@ public interface IdentifierService {
* @return the matching identifiers, or the site identifier if the object * @return the matching identifiers, or the site identifier if the object
* is a Site, or an empty array if no matching identifier is found. * is a Site, or an empty array if no matching identifier is found.
*/ */
List<String> lookup(Context contex, DSpaceObject dso); List<String> lookup(Context context, DSpaceObject dso);
/** /**
* *

View File

@@ -1,31 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external;
/** Represents a problem with the input source: e.g. cannot connect to the source.
* Created by Roeland Dillen (roeland at atmire dot com)
* Date: 19/09/12
* Time: 13:17
*/
public class MetadataSourceException extends Exception {
public MetadataSourceException() {
}
public MetadataSourceException(String s) {
super(s);
}
public MetadataSourceException(String s, Throwable throwable) {
super(s, throwable);
}
public MetadataSourceException(Throwable throwable) {
super(throwable);
}
}

View File

@@ -1,55 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external;
import org.apache.commons.collections.map.MultiValueMap;
import java.util.Collection;
/**
* Created by Roeland Dillen (roeland at atmire dot com)
* Date: 27/09/12
* Time: 15:26
*/
public class Query {
private MultiValueMap parameters = new MultiValueMap();
public MultiValueMap getParameters() {
return parameters;
}
public void addParameter(String key,Object value){
parameters.put(key,value);
}
protected void addSingletonParameter(String key,Object value){
parameters.remove(key);
parameters.put(key,value);
}
public <T> T getParameterAsClass(String key, Class<T> clazz){
Collection c=parameters.getCollection(key);
if(c==null||c.isEmpty()) return null;
else {
Object o=c.iterator().next();
if(clazz.isAssignableFrom(o.getClass()))
return (T) o ;
else return null;
}
}
public Collection getParameter(String key){
return parameters.getCollection(key);
}
public void setParameters(MultiValueMap parameters) {
this.parameters = parameters;
}
}

View File

@@ -10,7 +10,19 @@
# Introduction <a name="Introduction"></a> # # Introduction <a name="Introduction"></a> #
This documentation explains the features and the usage of the importer framework. This documentation explains the features and the usage of the importer framework.
Enabling the framework can be achieved by removing the comment block around the following step in item-submission.xml.
Implementation-specific or additional configuration can be found in the related documentation, if any. (Some implementations use other submission steps altogether, so make sure to double-check.)
```
<step>
<heading>submit.progressbar.lookup</heading>
<processing-class>org.dspace.submit.step.XMLUIStartSubmissionLookupStep</processing-class>
<jspui-binding>org.dspace.app.webui.submit.step.JSPStartSubmissionLookupStep</jspui-binding>
<xmlui-binding>org.dspace.app.xmlui.aspect.submission.submit.StartSubmissionLookupStep</xmlui-binding>
<workflow-editable>true</workflow-editable>
</step>
```
## Features <a name="Features"></a> ## ## Features <a name="Features"></a> ##
@@ -38,17 +50,17 @@ This modular design also allows it to be completely independent of the user inte
# Implementation of an import source <a name="Example-implementation"></a> # # Implementation of an import source <a name="Example-implementation"></a> #
Each importer implementation must at least implement interface *org.dspace.importer.external.service.other.Imports* and implement the inherited methods. Each importer implementation must at least implement interface *org.dspace.importer.external.service.components.MetadataSource* and implement the inherited methods.
One can also choose to implement class *org.dspace.importer.external.service.other.Source* next to the Imports interface. This class contains functionality to handle request timeouts and to retry requests. One can also choose to implement class *org.dspace.importer.external.service.components.AbstractRemoteMetadataSource* next to the MetadataSource interface. This class contains functionality to handle request timeouts and to retry requests.
A third option is to implement class *org.dspace.importer.external.service.AbstractImportSourceService*. This class already implements both the Imports interface and Source class. AbstractImportSourceService has a generic type set 'RecordType'. In the importer implementation this type set should be the class of the records received from the remote source's response (e.g. when using axiom to get the records from the remote source's XML response, the importer implementation's type set is *org.apache.axiom.om.OMElement*). A third option is to implement class *org.dspace.importer.external.service.AbstractImportSourceService*. This class already implements both the MetadataSource interface and the AbstractRemoteMetadataSource class. AbstractImportSourceService has a generic type set 'RecordType'. In the importer implementation this type set should be the class of the records received from the remote source's response (e.g. when using axiom to get the records from the remote source's XML response, the importer implementation's type set is *org.apache.axiom.om.OMElement*).
Implementing the AbstractImportSourceService allows the importer implementation to use the framework's built-in support to transform a record received from the remote source to an object of class *org.dspace.importer.external.datamodel.ImportRecord* containing DSpace metadata fields, as explained here: [Metadata mapping](#Mapping). Implementing the AbstractImportSourceService allows the importer implementation to use the framework's built-in support to transform a record received from the remote source to an object of class *org.dspace.importer.external.datamodel.ImportRecord* containing DSpace metadata fields, as explained here: [Metadata mapping](#Mapping).
## Inherited methods <a name="Inherited-methods"></a> ## ## Inherited methods <a name="Inherited-methods"></a> ##
Method getImportSource() should return a unique identifier. Importer implementations should not be called directly, but class *org.dspace.importer.external.service.ImportService* should be called instead. This class contains the same methods as the importer implementatons, but with an extra parameter 'url'. This url parameter should contain the same identifier that is returned by the getImportSource() method of the importer implementation you want to use. Method getImportSource() should return a unique identifier. Importer implementations should not be called directly, but class *org.dspace.importer.external.service.ImportService* should be called instead. This class contains the same methods as the importer implementations, but with an extra parameter 'url'. This url parameter should contain the same identifier that is returned by the getImportSource() method of the importer implementation you want to use.
The other inherited methods are used to query the remote source. The other inherited methods are used to query the remote source.
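To make the calling pattern above concrete, here is a deliberately hypothetical sketch. The service lookup, the bean name `importService`, and the `getRecords(url, query, start, count)` method name and signature are assumptions for illustration only (the actual methods are those declared on *org.dspace.importer.external.service.components.MetadataSource*, with the extra `url` parameter added by *ImportService*); only the idea that `url` must match an implementation's `getImportSource()` value comes from the text above.

```
// Hypothetical usage sketch -- the bean name and the getRecords signature are assumptions.
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.service.ImportService;
import org.dspace.utils.DSpace;

public class ImportServiceUsageExample
{
    public static void main(String[] args) throws Exception
    {
        // Look up the ImportService through the DSpace service manager
        // (the bean name "importService" is an assumption).
        ImportService importService = new DSpace().getServiceManager()
                .getServiceByName("importService", ImportService.class);

        // Must match the identifier returned by getImportSource() of the
        // importer implementation you want to use.
        String url = "https://example.org/remote-source";

        // Assumed query method: same as on the importer implementation,
        // plus the leading 'url' parameter described above.
        for (ImportRecord record : importService.getRecords(url, "dc.title:example", 0, 10))
        {
            System.out.println(record);
        }
    }
}
```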

View File

@@ -1,21 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external;
import org.dspace.importer.external.service.other.MetadataSource;
/**
* Created by: Antoine Snyers (antoine at atmire dot com)
* Date: 27 Oct 2014
*/
public abstract interface SourceExceptionHandler<T extends MetadataSource> {
public abstract void handle(T source);
}

View File

@@ -15,22 +15,37 @@ import java.util.LinkedList;
import java.util.List; import java.util.List;
/** /**
* Created by Roeland Dillen (roeland at atmire dot com) * This class contains all MetadatumDTO objects from an imported item
* Date: 17/09/12 *
* Time: 14:03 * @author Roeland Dillen (roeland at atmire dot com)
*/ */
public class ImportRecord { public class ImportRecord {
private List<MetadatumDTO> valueList = null; private List<MetadatumDTO> valueList = null;
/**
* Retrieve an unmodifiable List of the MetadatumDTO objects in this record
* @return List of MetadatumDTO
*/
public List<MetadatumDTO> getValueList() { public List<MetadatumDTO> getValueList() {
return Collections.unmodifiableList(valueList); return Collections.unmodifiableList(valueList);
} }
/**
* Create an ImportRecord instance initialized with a List of MetadatumDTO objects
* @param valueList
*/
public ImportRecord(List<MetadatumDTO> valueList) { public ImportRecord(List<MetadatumDTO> valueList) {
//don't want to alter the original list. Also now I can control the type of list //don't want to alter the original list. Also now I can control the type of list
this.valueList = new LinkedList<MetadatumDTO>(valueList); this.valueList = new LinkedList<>(valueList);
} }
/**
* Build a string based on the values in the valueList object
* The syntax will be
* Record{valueList={"schema"; "element" ; "qualifier"; "value"}}
*
* @return a concatenated string containing all values of the MetadatumDTO objects in valueList
*/
@Override @Override
public String toString() { public String toString() {
final StringBuilder sb = new StringBuilder(); final StringBuilder sb = new StringBuilder();
@@ -55,6 +70,13 @@ public class ImportRecord {
return sb.toString(); return sb.toString();
} }
/**
* Return the MetadatumDTOs that are related to a given schema/element/qualifier pair or triplet
* @param schema
* @param element
* @param qualifier
* @return the MetadatumDTOs that are related to a given schema/element/qualifier pair or triplet
*/
public Collection<MetadatumDTO> getValue(String schema, String element, String qualifier){ public Collection<MetadatumDTO> getValue(String schema, String element, String qualifier){
List<MetadatumDTO> values=new LinkedList<MetadatumDTO>(); List<MetadatumDTO> values=new LinkedList<MetadatumDTO>();
for(MetadatumDTO value:valueList){ for(MetadatumDTO value:valueList){
@@ -69,6 +91,10 @@ public class ImportRecord {
return values; return values;
} }
/**
* Add a value to the valueList
* @param value The MetadatumDTO to add to the valueList
*/
public void addValue(MetadatumDTO value){ public void addValue(MetadatumDTO value){
this.valueList.add(value); this.valueList.add(value);
} }
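To illustrate the ImportRecord API documented above, a small sketch follows. It only uses members shown in this changeset (the constructor, getValue and the MetadatumDTO setters), except MetadatumDTO.setValue, which is assumed to exist alongside the other setters; matching a null qualifier in getValue is also assumed.

```
// Illustrative sketch of the ImportRecord API added above; field values are made up.
import java.util.Arrays;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;

public class ImportRecordExample
{
    public static void main(String[] args)
    {
        MetadatumDTO title = new MetadatumDTO();
        title.setSchema("dc");
        title.setElement("title");
        title.setQualifier(null);
        title.setValue("An example record"); // setValue is assumed to exist

        // The constructor copies the list, so the caller's list is not altered.
        ImportRecord record = new ImportRecord(Arrays.asList(title));

        // getValue returns every MetadatumDTO matching the schema/element/qualifier triplet.
        for (MetadatumDTO value : record.getValue("dc", "title", null))
        {
            System.out.println(value.getValue());
        }
    }
}
```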

View File

@@ -0,0 +1,92 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.datamodel;
import org.apache.commons.collections.map.MultiValueMap;
import java.util.Collection;
/** Represents a query to a source. Subclasses may enforce stricter typing or more verbose setting of parameters.
* @author Roeland Dillen (roeland at atmire dot com)
*/
public class Query {
private MultiValueMap parameters = new MultiValueMap();
/**
* Retrieve the parameters set to this Query object
*
* @return the {@link org.apache.commons.collections.map.MultiValueMap} set to this object
*/
public MultiValueMap getParameters() {
return parameters;
}
/**
* In the parameters variable, adds the value to the collection associated with the specified key.
* <p>
* Unlike a normal <code>Map</code> the previous value is not replaced.
* Instead the new value is added to the collection stored against the key.
*
* @param key the key to store against
* @param value the value to add to the collection at the key
*/
public void addParameter(String key,Object value){
parameters.put(key,value);
}
/**
* In the parameters variable, adds the value to the collection associated with the specified key.
* <p>
* Unlike {@link #addParameter(String, Object)} the previous value is overridden.
* First, any existing values are removed, then the new value is added to the collection at the specified key
*
* @param key the key to store against
* @param value the value to add to the collection at the key
*/
protected void addSingletonParameter(String key,Object value){
parameters.remove(key);
parameters.put(key,value);
}
/**
* Retrieve a parameter as an instance of the given class
* @param key the key to retrieve the parameter from
* @param clazz the class to retrieve the parameter as (if no parameter of that class is found, a <tt>null</tt> value is returned)
* @return the first value stored at the key, cast to clazz, or <tt>null</tt> if there is no such value
*/
public <T> T getParameterAsClass(String key, Class<T> clazz){
Collection c=parameters.getCollection(key);
if(c==null||c.isEmpty()) return null;
else {
Object o=c.iterator().next();
if(clazz.isAssignableFrom(o.getClass()))
return (T) o ;
else return null;
}
}
/**
* Gets the collection mapped to the specified key.
* This method is a convenience method to typecast the result of <code>get(key)</code>.
*
* @param key the key used to retrieve the collection
* @return the collection mapped to the key, null if no mapping
*/
public Collection getParameter(String key){
return parameters.getCollection(key);
}
/**
* Set the parameters of this query object based on a given {@link org.apache.commons.collections.map.MultiValueMap}
* @param parameters a {@link org.apache.commons.collections.map.MultiValueMap} to set to this Query object
*/
public void setParameters(MultiValueMap parameters) {
this.parameters = parameters;
}
}
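A small usage sketch of the Query class added above, using only the methods shown in this file:

```
// Small usage sketch for the Query class shown above.
import java.util.Collection;
import org.dspace.importer.external.datamodel.Query;

public class QueryExample
{
    public static void main(String[] args)
    {
        Query query = new Query();

        // Multiple values can be stored under the same key; earlier values are kept.
        query.addParameter("term", "dark matter");
        query.addParameter("term", "cosmology");
        query.addParameter("maxRecords", 20);

        // Typed retrieval returns the first value if it is assignable to the requested class,
        // otherwise null.
        Integer max = query.getParameterAsClass("maxRecords", Integer.class);

        // Untyped retrieval returns the whole collection stored under the key.
        Collection terms = query.getParameter("term");

        System.out.println(max + " / " + terms);
    }
}
```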

View File

@@ -0,0 +1,14 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/** Package containing datamodel classes which are constructed/used during the retrieval of Records
* Strictly data-centric classes
* @author Roeland Dillen (roeland at atmire dot com)
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.datamodel;

View File

@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.exception;
/** Represents a problem with the input source: e.g. cannot connect to the source.
* @author Roeland Dillen (roeland at atmire dot com)
*/
public class MetadataSourceException extends Exception {
/**
* Constructs a new exception with {@code null} as its detail message.
* The cause is not initialized, and may subsequently be initialized by a
* call to {@link #initCause}.
*/
public MetadataSourceException() {
super();
}
/**
* Constructs a new exception with the specified detail message. The
* cause is not initialized, and may subsequently be initialized by
* a call to {@link #initCause}.
*
* @param message the detail message. The detail message is saved for
* later retrieval by the {@link #getMessage()} method.
*/
public MetadataSourceException(String message) {
super(message);
}
/**
* Constructs a new exception with the specified detail message and
* cause. <p>Note that the detail message associated with
* {@code cause} is <i>not</i> automatically incorporated in
* this exception's detail message.
*
* @param message the detail message (which is saved for later retrieval
* by the {@link #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the
* {@link #getCause()} method). (A <tt>null</tt> value is
* permitted, and indicates that the cause is nonexistent or
* unknown.)
*/
public MetadataSourceException(String message, Throwable cause) {
super(message, cause);
}
/**
* Constructs a new exception with the specified cause and a detail
* message of <tt>(cause==null ? null : cause.toString())</tt> (which
* typically contains the class and detail message of <tt>cause</tt>).
* This constructor is useful for exceptions that are little more than
* wrappers for other throwables (for example, {@link
* java.security.PrivilegedActionException}).
*
* @param cause the cause (which is saved for later retrieval by the
* {@link #getCause()} method). (A <tt>null</tt> value is
* permitted, and indicates that the cause is nonexistent or
* unknown.)
*/
public MetadataSourceException(Throwable cause) {
super(cause);
}
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.exception;
import org.dspace.importer.external.service.components.AbstractRemoteMetadataSource;
/**
* Represents a handler that forces implementations to define their own behaviour for exceptions originating from a remote metadata source.
* @author Antoine Snyers (antoine at atmire dot com)
*/
public abstract interface SourceExceptionHandler<T extends AbstractRemoteMetadataSource> {
/**
* Represents a method contract to handle Exceptions originating from the source in a specific way
* Implementations define their own desired behaviour
* @param source The source of the exception
*/
public abstract void handle(T source);
}

View File

@@ -0,0 +1,13 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* External importer Exception classes + possible handlers
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.exception;

View File

@@ -10,7 +10,7 @@ package org.dspace.importer.external.metadatamapping;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
import org.dspace.importer.external.metadatamapping.service.MetadataProcessorService; import org.dspace.importer.external.metadatamapping.transform.MetadataProcessorService;
import java.util.Collection; import java.util.Collection;
import java.util.LinkedList; import java.util.LinkedList;
@@ -18,9 +18,10 @@ import java.util.List;
import java.util.Map; import java.util.Map;
/** /**
* Created by Roeland Dillen (roeland at atmire dot com) * Abstract class that implements {@link MetadataFieldMapping}
* Date: 19/09/12 * This class adds a default implementation for the MetadataFieldMapping methods
* Time: 10:09 *
* @author Roeland Dillen (roeland at atmire dot com)
*/ */
public abstract class AbstractMetadataFieldMapping<RecordType> implements MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> { public abstract class AbstractMetadataFieldMapping<RecordType> implements MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> {
@@ -31,13 +32,25 @@ public abstract class AbstractMetadataFieldMapping<RecordType> implements Metada
*/ */
private static Logger log = Logger.getLogger(AbstractMetadataFieldMapping.class); private static Logger log = Logger.getLogger(AbstractMetadataFieldMapping.class);
/* A map containing what processing has to be done on a given metadataFieldConfig.
* The processing of a value is used to determine the actual value that will be returned and used.
*/
private Map<MetadataFieldConfig, MetadataProcessorService> metadataProcessorMap; private Map<MetadataFieldConfig, MetadataProcessorService> metadataProcessorMap;
/**
* Set a map of metadata processors. This map is used to process metadata values so that they comply with the requirements of certain metadata fields
* @param metadataProcessorMap
*/
public void setMetadataProcessorMap(Map<MetadataFieldConfig, MetadataProcessorService> metadataProcessorMap) public void setMetadataProcessorMap(Map<MetadataFieldConfig, MetadataProcessorService> metadataProcessorMap)
{ {
this.metadataProcessorMap = metadataProcessorMap; this.metadataProcessorMap = metadataProcessorMap;
} }
/**
* Return the metadataProcessor used to update values to make them more compliant for certain goals
* @param metadataField to retrieve processor for
* @return metadataProcessor
*/
public MetadataProcessorService getMetadataProcessor(MetadataFieldConfig metadataField) public MetadataProcessorService getMetadataProcessor(MetadataFieldConfig metadataField)
{ {
if(metadataProcessorMap != null) if(metadataProcessorMap != null)
@@ -48,11 +61,14 @@ public abstract class AbstractMetadataFieldMapping<RecordType> implements Metada
} }
} }
/**
* @param field MetadataFieldConfig representing what to map the value to
* @param value The value to map to a MetadatumDTO
* @return A metadatumDTO created from the field and value
*/
public MetadatumDTO toDCValue(MetadataFieldConfig field, String value) { public MetadatumDTO toDCValue(MetadataFieldConfig field, String value) {
MetadatumDTO dcValue = new MetadatumDTO(); MetadatumDTO dcValue = new MetadatumDTO();
if (field == null) return null; if (field == null) return null;
MetadataProcessorService metadataProcessor = getMetadataProcessor(field); MetadataProcessorService metadataProcessor = getMetadataProcessor(field);
if(metadataProcessor != null) if(metadataProcessor != null)
@@ -66,48 +82,31 @@ public abstract class AbstractMetadataFieldMapping<RecordType> implements Metada
return dcValue; return dcValue;
} }
private boolean reverseDifferent = false; /**
* Retrieve the metadataFieldMap set to this class
private String AND = "AND"; * @return Map<MetadataFieldConfig, MetadataContributor<RecordType>> representing the metadataFieldMap
private String OR = "OR"; */
private String NOT = "NOT";
public String getAND() {
return AND;
}
public void setAND(String AND) {
this.AND = AND;
}
public String getOR() {
return OR;
}
public void setOR(String OR) {
this.OR = OR;
}
public String getNOT() {
return NOT;
}
public void setNOT(String NOT) {
this.NOT = NOT;
}
public Map<MetadataFieldConfig, MetadataContributor<RecordType>> getMetadataFieldMap() { public Map<MetadataFieldConfig, MetadataContributor<RecordType>> getMetadataFieldMap() {
return metadataFieldMap; return metadataFieldMap;
} }
/** Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
* @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to the item.
*/
public void setMetadataFieldMap(Map<MetadataFieldConfig, MetadataContributor<RecordType>> metadataFieldMap) { public void setMetadataFieldMap(Map<MetadataFieldConfig, MetadataContributor<RecordType>> metadataFieldMap) {
this.metadataFieldMap = metadataFieldMap; this.metadataFieldMap = metadataFieldMap;
for(MetadataContributor<RecordType> mc:metadataFieldMap.values()){ for(MetadataContributor<RecordType> mc:metadataFieldMap.values()){
mc.setMetadataFieldMapping(this); mc.setMetadataFieldMapping(this);
} }
} }
/**
* Loop over the MetadataContributors and return the combined list of MetadatumDTO objects they retrieve
* @param record Used to retrieve the MetadatumDTO
* @return List of MetadatumDTO
*/
@Override @Override
public Collection<MetadatumDTO> resultToDCValueMapping(RecordType record) { public Collection<MetadatumDTO> resultToDCValueMapping(RecordType record) {
List<MetadatumDTO> values=new LinkedList<MetadatumDTO>(); List<MetadatumDTO> values=new LinkedList<MetadatumDTO>();

View File

@@ -8,9 +8,10 @@
package org.dspace.importer.external.metadatamapping; package org.dspace.importer.external.metadatamapping;
/** /**
* Created by Roeland Dillen (roeland at atmire dot com) * A generalised configuration for metadatafields.
* Date: 19/09/12 * This is used to make the link between values and the actual MetadatumDTO object.
* Time: 10:11 *
* @author Roeland Dillen (roeland at atmire dot com)
*/ */
public class MetadataFieldConfig { public class MetadataFieldConfig {
private String schema; private String schema;
@@ -18,6 +19,12 @@ public class MetadataFieldConfig {
private String qualifier; private String qualifier;
/**
* Indicates whether some other object is "equal to" this one.
* @param o the reference object with which to compare.
* @return {@code true} if this object is the same as the obj
* argument; {@code false} otherwise.
*/
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
@@ -32,6 +39,10 @@ public class MetadataFieldConfig {
return true; return true;
} }
/**
* Create the String representation of the MetadataFieldConfig
* @return a string representation of the MetadataFieldConfig
*/
@Override @Override
public String toString() { public String toString() {
final StringBuilder sb = new StringBuilder(); final StringBuilder sb = new StringBuilder();
@@ -43,6 +54,12 @@ public class MetadataFieldConfig {
return sb.toString(); return sb.toString();
} }
/**
* Returns a hash code value for the object. This method is
* supported for the benefit of hash tables such as those provided by
* {@link java.util.HashMap}.
* @return a hash code value for this object.
*/
@Override @Override
public int hashCode() { public int hashCode() {
int result = schema.hashCode(); int result = schema.hashCode();
@@ -51,26 +68,41 @@ public class MetadataFieldConfig {
return result; return result;
} }
public String getSchema() {
return schema;
}
/**
* Create a MetadataFieldConfig based on a given MetadatumDTO
* This MetadatumDTO object contains the schema, element and qualifier needed to initialize the MetadataFieldConfig
* @param value
*/
public MetadataFieldConfig(MetadatumDTO value) { public MetadataFieldConfig(MetadatumDTO value) {
this.schema = value.getSchema(); this.schema = value.getSchema();
this.element = value.getElement(); this.element = value.getElement();
this.qualifier = value.getQualifier(); this.qualifier = value.getQualifier();
} }
/**
* An empty initialization of MetadataFieldConfig
*/
public MetadataFieldConfig() { public MetadataFieldConfig() {
} }
/**
* Create a MetadataFieldConfig using a schema,element and qualifier
* @param schema The schema to set to this object
* @param element The element to set to this object
* @param qualifier The qualifier to set to this object
*/
public MetadataFieldConfig(String schema, String element, String qualifier) { public MetadataFieldConfig(String schema, String element, String qualifier) {
this.schema = schema; this.schema = schema;
this.element = element; this.element = element;
this.qualifier = qualifier; this.qualifier = qualifier;
} }
/**
* Create a MetadataFieldConfig using a single value.
* This value is split up into schema, element and qualifier, based on a dot(.)
* @param full A string representing the schema.element.qualifier triplet
*/
public MetadataFieldConfig(String full) { public MetadataFieldConfig(String full) {
String elements[]=full.split("\\."); String elements[]=full.split("\\.");
if(elements.length==2){ if(elements.length==2){
@@ -83,34 +115,75 @@ public class MetadataFieldConfig {
} }
} }
/**
* Create a MetadataFieldConfig using a schema and element
* qualifier will be set to <code>null</code>
* @param schema The schema to set to this object
* @param element The element to set to this object
*/
public MetadataFieldConfig(String schema, String element) { public MetadataFieldConfig(String schema, String element) {
this.schema = schema; this.schema = schema;
this.element = element; this.element = element;
this.qualifier = null; this.qualifier = null;
} }
/**
* Set the schema to this MetadataFieldConfig
* @param schema The schema to set to this object
*/
public void setSchema(String schema) { public void setSchema(String schema) {
this.schema = schema; this.schema = schema;
} }
/**
* Return the schema set to this object.
* <code>null</code> if nothing is set
* @return The schema of this object
*/
public String getSchema() {
return schema;
}
/**
* Return a string representing the field of this object
* @return The field that is set to this object, in the form of schema.element.qualifier
*/
public String getField() { public String getField() {
return schema + "." + element + (qualifier==null?"":("." + qualifier)); return schema + "." + element + (qualifier==null?"":("." + qualifier));
} }
/**
* Return the element set to this object.
* <code>null</code> if nothing is set
* @return The element of this object
*/
public String getElement() { public String getElement() {
return element; return element;
} }
/**
* Set the element to this MetadataFieldConfig
* @param element The element to set to this object
*/
public void setElement(String element) { public void setElement(String element) {
this.element = element; this.element = element;
} }
/**
* Return the qualifier set to this object.
* <code>null</code> if nothing is set
* @return The qualifier of this object
*/
public String getQualifier() { public String getQualifier() {
return qualifier; return qualifier;
} }
/**
* Set the qualifier to this MetadataFieldConfig
* @param qualifier The qualifier to set to this object
*/
public void setQualifier(String qualifier) { public void setQualifier(String qualifier) {
this.qualifier = qualifier; this.qualifier = qualifier;
} }
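A brief sketch of the MetadataFieldConfig constructors documented above; the assumption that the dot-separated form leaves the qualifier unset when only two parts are given follows the schema/element constructor shown here:

```
// Brief sketch of the MetadataFieldConfig constructors documented above.
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;

public class MetadataFieldConfigExample
{
    public static void main(String[] args)
    {
        // Explicit schema/element/qualifier.
        MetadataFieldConfig issued = new MetadataFieldConfig("dc", "date", "issued");

        // Dot-separated form; the string is split into schema, element and (optional) qualifier.
        MetadataFieldConfig title = new MetadataFieldConfig("dc.title");

        // getField() re-assembles the triplet, omitting a null qualifier.
        System.out.println(issued.getField()); // dc.date.issued
        System.out.println(title.getField());  // dc.title (assuming the two-part form leaves qualifier null)
    }
}
```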

View File

@@ -10,16 +10,27 @@ package org.dspace.importer.external.metadatamapping;
import java.util.Collection; import java.util.Collection;
/** /**
* Created by Roeland Dillen (roeland at atmire dot com) * Represents an interface for the mapping of the metadatum fields
* Date: 18/09/12 *
* Time: 14:41 * @author Roeland Dillen (roeland at atmire dot com)
*/ */
public interface MetadataFieldMapping<RecordType,QueryType> { public interface MetadataFieldMapping<RecordType,QueryType> {
public MetadatumDTO toDCValue(MetadataFieldConfig field, String mf); /**
* @param field MetadataFieldConfig representing what to map the value to
* @param value The value to map to a MetadatumDTO
* @return A metadatumDTO created from the field and value
*/
public MetadatumDTO toDCValue(MetadataFieldConfig field, String value);
public Collection<MetadatumDTO> resultToDCValueMapping(RecordType record);
/**
* Create a collection of MetadatumDTO retrieved from a given RecordType
* @param record Used to retrieve the MetadatumDTO
* @return Collection of MetadatumDTO
*/
public Collection<MetadatumDTO> resultToDCValueMapping(RecordType record);
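To make the two interface methods concrete, here is a deliberately minimal, hypothetical mapping whose RecordType is a plain String. It assumes the interface declares only the two methods shown above and that MetadatumDTO offers a setValue counterpart to its other setters; real implementations would normally extend AbstractMetadataFieldMapping instead:

```
// Minimal, hypothetical MetadataFieldMapping whose record type is a plain String.
import java.util.Collection;
import java.util.Collections;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;

public class TitleOnlyFieldMapping
        implements MetadataFieldMapping<String, MetadataContributor<String>>
{
    @Override
    public MetadatumDTO toDCValue(MetadataFieldConfig field, String value)
    {
        MetadatumDTO dcValue = new MetadatumDTO();
        dcValue.setSchema(field.getSchema());
        dcValue.setElement(field.getElement());
        dcValue.setQualifier(field.getQualifier());
        dcValue.setValue(value); // setValue is assumed to exist
        return dcValue;
    }

    @Override
    public Collection<MetadatumDTO> resultToDCValueMapping(String record)
    {
        // Every incoming record is mapped onto a single dc.title value.
        return Collections.singletonList(
                toDCValue(new MetadataFieldConfig("dc", "title"), record));
    }
}
```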

View File

@@ -8,9 +8,13 @@
package org.dspace.importer.external.metadatamapping; package org.dspace.importer.external.metadatamapping;
/** /**
* Created by Philip Vissenaekens (philip at atmire dot com) * This class is used to carry data between processes.
* Date: 21/10/15 * Using this class, we have a uniform, generalised single Object type containing the information used by different classes.
* Time: 09:52 * This Data Transfer Object contains all data for a call pertaining to metadata, making it possible to return a larger quantity of information at once.
* As this is a generalised class, we can use this across the external imports implementations
*
* @author Philip Vissenaekens (philip at atmire dot com)
*
*/ */
public class MetadatumDTO { public class MetadatumDTO {
@@ -19,33 +23,68 @@ public class MetadatumDTO {
private String qualifier; private String qualifier;
private String value; private String value;
/**
* An empty MetadatumDTO constructor
*/
public MetadatumDTO() { public MetadatumDTO() {
} }
/**
* Retrieve the schema set to this MetadatumDTO.
* Returns <tt>null</tt> if no schema is set
* @return schema
*/
public String getSchema() { public String getSchema() {
return schema; return schema;
} }
/**
* Set the schema to this MetadatumDTO
* @param schema
*/
public void setSchema(String schema) { public void setSchema(String schema) {
this.schema = schema; this.schema = schema;
} }
/**
* Retrieve the element set to this MetadatumDTO.
* Returns <tt>null</tt> if no element is set
* @return element
*/
public String getElement() { public String getElement() {
return element; return element;
} }
/**
* Set the element to this MetadatumDTO
* @param element
*/
public void setElement(String element) { public void setElement(String element) {
this.element = element; this.element = element;
} }
/**
* Retrieve the qualifier set to this MetadatumDTO.
* Returns <tt>null</tt> if no qualifier is set
* @return qualifier
*/
public String getQualifier() { public String getQualifier() {
return qualifier; return qualifier;
} }
/**
* Set the qualifier to this MetadatumDTO
* @param qualifier
*/
public void setQualifier(String qualifier) { public void setQualifier(String qualifier) {
this.qualifier = qualifier; this.qualifier = qualifier;
} }
/**
* Retrieve the value set to this MetadatumDTO.
* Returns <tt>null</tt> if no value is set
* @return value
*/
public String getValue() { public String getValue() {
return value; return value;
} }

View File

@@ -16,9 +16,8 @@ import java.util.LinkedList;
import java.util.List; import java.util.List;
/** /**
* Created by Philip Vissenaekens (philip at atmire dot com) * Wrapper class used to accommodate the possibility of correlations between multiple MetadatumContributor objects
* Date: 17/06/15 * @author Philip Vissenaekens (philip at atmire dot com)
* Time: 11:02
*/ */
public class CombinedMetadatumContributor<T> implements MetadataContributor<T> { public class CombinedMetadatumContributor<T> implements MetadataContributor<T> {
private MetadataFieldConfig field; private MetadataFieldConfig field;
@@ -29,15 +28,28 @@ public class CombinedMetadatumContributor<T> implements MetadataContributor<T> {
private MetadataFieldMapping<T,MetadataContributor<T>> metadataFieldMapping; private MetadataFieldMapping<T,MetadataContributor<T>> metadataFieldMapping;
/**
* Initialize an empty CombinedMetadatumContributor object
*/
public CombinedMetadatumContributor() { public CombinedMetadatumContributor() {
} }
/**
*
* @param field {@link org.dspace.importer.external.metadatamapping.MetadataFieldConfig} used in mapping
* @param metadatumContributors A list of MetadataContributor
* @param separator A separator used to differentiate between different values
*/
public CombinedMetadatumContributor(MetadataFieldConfig field, List<MetadataContributor> metadatumContributors, String separator) { public CombinedMetadatumContributor(MetadataFieldConfig field, List<MetadataContributor> metadatumContributors, String separator) {
this.field = field; this.field = field;
this.metadatumContributors = (LinkedList<MetadataContributor>) metadatumContributors; this.metadatumContributors = (LinkedList<MetadataContributor>) metadatumContributors;
this.separator = separator; this.separator = separator;
} }
/**
* Set the metadataFieldMapping used when transforming a record to actual metadata
* @param metadataFieldMapping
*/
@Override @Override
public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping) { public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping; this.metadataFieldMapping = metadataFieldMapping;
@@ -47,18 +59,16 @@ public class CombinedMetadatumContributor<T> implements MetadataContributor<T> {
} }
} }
/** /**
* a separate Metadatum object is created for each index of Metadatum returned from the calls to * a separate Metadatum object is created for each index of Metadatum returned from the calls to
* MetadatumContributor.contributeMetadata(t) for each MetadatumContributor in the metadatumContributors list. * MetadatumContributor.contributeMetadata(t) for each MetadatumContributor in the metadatumContributors list.
* We assume that each contributor returns the same amount of Metadatum objects * We assume that each contributor returns the same amount of Metadatum objects
* @param t the object we are trying to translate * @param t the object we are trying to translate
* @return * @return a collection of metadata composed by each MetadataContributor
*/ */
@Override @Override
public Collection<MetadatumDTO> contributeMetadata(T t) { public Collection<MetadatumDTO> contributeMetadata(T t) {
List<MetadatumDTO> values=new LinkedList<MetadatumDTO>(); List<MetadatumDTO> values=new LinkedList<>();
LinkedList<LinkedList<MetadatumDTO>> metadatumLists = new LinkedList<>(); LinkedList<LinkedList<MetadatumDTO>> metadatumLists = new LinkedList<>();
@@ -84,26 +94,50 @@ public class CombinedMetadatumContributor<T> implements MetadataContributor<T> {
return values; return values;
} }
/**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO
* @return MetadataFieldConfig
*/
public MetadataFieldConfig getField() { public MetadataFieldConfig getField() {
return field; return field;
} }
/**
* Setting the MetadataFieldConfig
* @param field MetadataFieldConfig used while retrieving MetadatumDTO
*/
public void setField(MetadataFieldConfig field) { public void setField(MetadataFieldConfig field) {
this.field = field; this.field = field;
} }
/**
* Return the List of MetadataContributor objects set to this class
* @return metadatumContributors, list of MetadataContributor
*/
public LinkedList<MetadataContributor> getMetadatumContributors() { public LinkedList<MetadataContributor> getMetadatumContributors() {
return metadatumContributors; return metadatumContributors;
} }
/**
* Set the List of MetadataContributor objects for this class
* @param metadatumContributors A list of MetadataContributor objects
*/
public void setMetadatumContributors(LinkedList<MetadataContributor> metadatumContributors) { public void setMetadatumContributors(LinkedList<MetadataContributor> metadatumContributors) {
this.metadatumContributors = metadatumContributors; this.metadatumContributors = metadatumContributors;
} }
/**
* Return the separator used to differentiate between distinct values
* @return the separator used to differentiate between distinct values
*/
public String getSeparator() { public String getSeparator() {
return separator; return separator;
} }
/**
* Set the separator used to differentiate between distinct values
* @param separator
*/
public void setSeparator(String separator) { public void setSeparator(String separator) {
this.separator = separator; this.separator = separator;
} }
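A self-contained, hypothetical illustration of the index-wise combination described in the javadoc above: values at the same index from each contributor are joined with the configured separator, yielding one combined value per index.
import java.util.Arrays;
import java.util.List;

public class CombinedValuesExample {
    public static void main(String[] args) {
        // Hypothetical per-contributor results; each contributor must return the same number of values.
        List<List<String>> perContributorValues = Arrays.asList(
                Arrays.asList("John", "Jane"),   // values from the first contributor
                Arrays.asList("Doe", "Smith"));  // values from the second contributor
        String separator = ", ";
        for (int i = 0; i < perContributorValues.get(0).size(); i++) {
            StringBuilder combined = new StringBuilder();
            for (List<String> values : perContributorValues) {
                if (combined.length() > 0) {
                    combined.append(separator);
                }
                combined.append(values.get(i));
            }
            System.out.println(combined); // prints "John, Doe", then "Jane, Smith"
        }
    }
}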

View File

@@ -13,13 +13,21 @@ import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import java.util.Collection; import java.util.Collection;
/** /**
* Created by Roeland Dillen (roeland at atmire dot com) * @author Roeland Dillen (roeland at atmire dot com)
* Date: 11/01/13
* Time: 09:18
*/ */
public interface MetadataContributor<RecordType> { public interface MetadataContributor<RecordType> {
/**
* Set the metadataFieldMapping
* @param rt the MetadataFieldMapping object to set to the MetadataContributor
*/
public void setMetadataFieldMapping(MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> rt); public void setMetadataFieldMapping(MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> rt);
/**
* Implementations have the responsibility to process/map their own type of metadata based on a given record
* and return a collection of the generalised MetadatumDTO objects
* @param t The recordType object to retrieve metadata from
* @return A collection of MetadatumDTO objects, retrieved from the RecordType
*/
public Collection<MetadatumDTO> contributeMetadata(RecordType t); public Collection<MetadatumDTO> contributeMetadata(RecordType t);
} }

View File

@@ -24,27 +24,42 @@ import java.util.List;
import java.util.Map; import java.util.Map;
/** /**
* Created by Roeland Dillen (roeland at atmire dot com) * Metadata contributor that takes an axiom OMElement and turns it into a metadatum
* Date: 11/01/13 * @author Roeland Dillen (roeland at atmire dot com)
* Time: 09:21
*/ */
public class SimpleXpathMetadatumContributor implements MetadataContributor<OMElement> { public class SimpleXpathMetadatumContributor implements MetadataContributor<OMElement> {
private MetadataFieldConfig field; private MetadataFieldConfig field;
/**
* Return prefixToNamespaceMapping
* @return Map<String, String> prefixToNamespaceMapping
*/
public Map<String, String> getPrefixToNamespaceMapping() { public Map<String, String> getPrefixToNamespaceMapping() {
return prefixToNamespaceMapping; return prefixToNamespaceMapping;
} }
private MetadataFieldMapping<OMElement,MetadataContributor<OMElement>> metadataFieldMapping; private MetadataFieldMapping<OMElement,MetadataContributor<OMElement>> metadataFieldMapping;
/**
* Return metadataFieldMapping
* @return MetadataFieldMapping<OMElement,MetadataContributor<OMElement>> metadataFieldMapping
*/
public MetadataFieldMapping<OMElement,MetadataContributor<OMElement>> getMetadataFieldMapping() { public MetadataFieldMapping<OMElement,MetadataContributor<OMElement>> getMetadataFieldMapping() {
return metadataFieldMapping; return metadataFieldMapping;
} }
/**
* Set the metadataFieldMapping of this SimpleXpathMetadatumContributor
* @param metadataFieldMapping
*/
public void setMetadataFieldMapping(MetadataFieldMapping<OMElement,MetadataContributor<OMElement>> metadataFieldMapping) { public void setMetadataFieldMapping(MetadataFieldMapping<OMElement,MetadataContributor<OMElement>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping; this.metadataFieldMapping = metadataFieldMapping;
} }
/**
* Set the prefixToNamespaceMapping for this object.
* @param prefixToNamespaceMapping
*/
@Resource(name="isiFullprefixMapping") @Resource(name="isiFullprefixMapping")
public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) { public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) {
this.prefixToNamespaceMapping = prefixToNamespaceMapping; this.prefixToNamespaceMapping = prefixToNamespaceMapping;
@@ -52,26 +67,47 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<OMEl
private Map<String,String> prefixToNamespaceMapping; private Map<String,String> prefixToNamespaceMapping;
/**
* Initialize SimpleXpathMetadatumContributor with a query, Map<String, String>(prefixToNamespaceMapping) and MetadataFieldConfig(field)
* @param query String
* @param prefixToNamespaceMapping Map<String, String>
* @param field MetadataFieldConfig
*/
public SimpleXpathMetadatumContributor(String query, Map<String, String> prefixToNamespaceMapping, MetadataFieldConfig field) { public SimpleXpathMetadatumContributor(String query, Map<String, String> prefixToNamespaceMapping, MetadataFieldConfig field) {
this.query = query; this.query = query;
this.prefixToNamespaceMapping = prefixToNamespaceMapping; this.prefixToNamespaceMapping = prefixToNamespaceMapping;
this.field = field; this.field = field;
} }
/**
* Empty constructor for SimpleXpathMetadatumContributor
*/
public SimpleXpathMetadatumContributor() { public SimpleXpathMetadatumContributor() {
} }
private String query; private String query;
/**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO
* @return MetadataFieldConfig
*/
public MetadataFieldConfig getField() { public MetadataFieldConfig getField() {
return field; return field;
} }
/**
* Setting the MetadataFieldConfig
* @param field MetadataFieldConfig used while retrieving MetadatumDTO
*/
@Required @Required
public void setField(MetadataFieldConfig field) { public void setField(MetadataFieldConfig field) {
this.field = field; this.field = field;
} }
/**
* Return the query used to create the xpathExpression; this query determines which node(s) the metadata value is retrieved from
* @return the query this instance is based on
*/
public String getQuery() { public String getQuery() {
return query; return query;
} }
@@ -80,9 +116,15 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<OMEl
this.query = query; this.query = query;
} }
/**
* Retrieve the metadata associated with the given object.
* Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO list
* @param t A class to retrieve metadata from.
* @return a collection of MetadatumDTO built from the values matched by the query
*/
@Override @Override
public Collection<MetadatumDTO> contributeMetadata(OMElement t) { public Collection<MetadatumDTO> contributeMetadata(OMElement t) {
List<MetadatumDTO> values=new LinkedList<MetadatumDTO>(); List<MetadatumDTO> values=new LinkedList<>();
try { try {
AXIOMXPath xpath=new AXIOMXPath(query); AXIOMXPath xpath=new AXIOMXPath(query);
for(String ns:prefixToNamespaceMapping.keySet()){ for(String ns:prefixToNamespaceMapping.keySet()){
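A hedged construction sketch for the xpath contributor above; the namespace prefix/uri, the xpath and the three-argument MetadataFieldConfig constructor are illustrative assumptions, and in practice these objects are wired through the Spring configuration (e.g. config/spring/api/pubmed-integration.xml) rather than built in code.
import java.util.Collections;
import java.util.Map;

public class TitleContributorFactory {
    public static SimpleXpathMetadatumContributor createTitleContributor() {
        // Hypothetical namespace mapping and xpath; adjust to the record format actually retrieved.
        Map<String, String> namespaces = Collections.singletonMap("art", "http://example.org/article");
        MetadataFieldConfig title = new MetadataFieldConfig("dc", "title", null); // constructor assumed
        return new SimpleXpathMetadatumContributor("art:ArticleTitle", namespaces, title);
    }
}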

View File

@@ -0,0 +1,16 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Contains the classes used to map between retrieved records and actual usable {@link org.dspace.importer.external.metadatamapping.MetadatumDTO}
* Classes are used in the spring config of implementations of {@link org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping}
* which query an element in the retrieved record and map it to a metadata field.
* @author Roeland Dillen (roeland at atmire dot com)
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.metadatamapping.contributor;

View File

@@ -0,0 +1,14 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Main package used for any mapping/processing of retrieved metadata
* @author Roeland Dillen (roeland at atmire dot com)
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.metadatamapping;

View File

@@ -1,22 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.service;
import org.dspace.content.Item;
import org.dspace.importer.external.Query;
import org.dspace.importer.external.MetadataSourceException;
/**
* Created by Roeland Dillen (roeland at atmire dot com)
* Date: 14/12/12
* Time: 11:44
*/
public interface GenerateQueryService {
public Query generateQueryForItem(Item item) throws MetadataSourceException;
}

View File

@@ -1,18 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.service;
/**
* User: kevin (kevin at atmire.com)
* Date: 23/10/12
* Time: 09:49
*/
public interface MetadataProcessorService {
public String processMetadataValue(String value);
}

View File

@@ -5,20 +5,22 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.importer.external.metadatamapping.processor; package org.dspace.importer.external.metadatamapping.transform;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.dspace.importer.external.metadatamapping.service.MetadataProcessorService;
/** /**
* Removes the last point from an author name, this is required for the SAP lookup * Removes the last point from an author name.
* *
* User: kevin (kevin at atmire.com) * @author Kevin Van de Velde (kevin at atmire dot com)
* Date: 23/10/12
* Time: 09:50
*/ */
public class AuthorMetadataProcessorService implements MetadataProcessorService { public class AuthorMetadataProcessorService implements MetadataProcessorService {
/**
* Strip a given value of its last dot (.)
* @param value the value to run the processing over
* @return The initial param with its ending dot stripped
*/
@Override @Override
public String processMetadataValue(String value) { public String processMetadataValue(String value) {
String ret=value; String ret=value;

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.transform;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
/**
* Represent a service to generate a query based on an item
*
* @author Roeland Dillen (roeland at atmire dot com)
*/
public interface GenerateQueryService {
/* Create a Query object based on a given item.
* Implementations need to make their own decisions as to what to include in or leave out of the query
*/
public Query generateQueryForItem(Item item) throws MetadataSourceException;
}

View File

@@ -0,0 +1,21 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.transform;
/**
* Represents an interface to do processing of metadataValues
*
* @author kevin (kevin at atmire.com)
*/
public interface MetadataProcessorService {
/* Process a given metadataValue to make it compliant with specific rules.
* Implementations should regulate their own processing as to what is required for a specific case
*/
public String processMetadataValue(String value);
}

View File

@@ -0,0 +1,13 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Transformer classes used by services classes (Such as processing/generating queries, altering results, etc)
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.metadatamapping.transform;

View File

@@ -0,0 +1,14 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Main package for the live importing from external sources
* @author Roeland Dillen (roeland at atmire dot com)
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external;

View File

@@ -16,10 +16,10 @@ I will not go into detail to what exactly is configured for the pubmed integrati
# Additional Config <a name="Additional-config"></a> # # Additional Config <a name="Additional-config"></a> #
To be able to do the lookup for our configured import-service, we need to be able to know what url to use to check for publications. To be able to do the lookup for our configured import-service, we need to be able to know what url to use to check for publications.
This can be done by setting the publication.url property though maven using 2 different ways. This can be done by setting the `publication-lookup.url` property in `publication-lookup.cfg` to one of two settings.
- Setting the publication.url property to the address as defined in the configured importservice (PubmedImportService in this case) in the file spring-dspace-addon-import-services.xml. This will check this single configured url for publications. - Setting the `publication-lookup.url` property to the address as defined in the configured importservice (PubmedImportService in this case) in the file `spring-dspace-addon-import-services.xml`. This will check this single configured url for publications.
- Setting the publication url property to an asterisk '*'. This will check all configured importServices for their urls to base the search for publications on. - Setting the `publication-lookup.url` property to an asterisk '*'. This will check all configured importServices for the urls on which to base the search for publications.
# Pubmed specific classes Config <a name="Pubmed-specific"></a> # # Pubmed specific classes Config <a name="Pubmed-specific"></a> #
@@ -33,4 +33,4 @@ These classes are simply implementations based of the base classes defined in im
## Service classes <a name="Service-classes"></a> ## ## Service classes <a name="Service-classes"></a> ##
- "GeneratePubmedQueryService". Generates the pubmed query which is used to retrieve the records. This is based on a given item. - "GeneratePubmedQueryService". Generates the pubmed query which is used to retrieve the records. This is based on a given item.
- "ImportMetadataSourceServiceImpl". Child class of "AbstractImportMetadataSourceService", retrieving the records from pubmed. - "PubmedImportMetadataSourceServiceImpl". Child class of "AbstractImportMetadataSourceService", retrieving the records from pubmed.

View File

@@ -14,10 +14,18 @@ import javax.annotation.Resource;
import java.util.Map; import java.util.Map;
/** /**
* Created by jonas - jonas@atmire.com on 06/11/15. * An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Pubmed metadatum fields on the DSpace metadatum fields
*
* @author jonas - (jonas at atmire dot com)
*/ */
public class PubmedFieldMapping extends AbstractMetadataFieldMapping { public class PubmedFieldMapping extends AbstractMetadataFieldMapping {
/** Defines which metadatum is mapped onto which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
* @param metadataFieldMap The map containing the link between retrieved metadata and metadata that will be set to the item.
*/
@Override @Override
@Resource (name = "pubmedMetadataFieldMap") @Resource (name = "pubmedMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) { public void setMetadataFieldMap(Map metadataFieldMap) {

View File

@@ -6,7 +6,7 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.importer.external.pubmed.metadatamapping; package org.dspace.importer.external.pubmed.metadatamapping.contributor;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.content.DCDate; import org.dspace.content.DCDate;
@@ -24,15 +24,17 @@ import java.util.LinkedList;
import java.util.List; import java.util.List;
/** /**
* Created by Philip Vissenaekens (philip at atmire dot com) * Pubmed specific implementation of {@link MetadataContributor}
* Date: 06/07/15 * Responsible for generating a set of Date metadata from the retrieved document.
* Time: 13:48 *
* @author Philip Vissenaekens (philip at atmire dot com)
*/ */
public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T> { public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T> {
Logger log = Logger.getLogger(PubmedDateMetadatumContributor.class); Logger log = Logger.getLogger(PubmedDateMetadatumContributor.class);
private MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping; private MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping;
/* A list of all the dateFormats to attempt; these should be configured with the most specific first and the more lenient at the back */
private List<String> dateFormatsToAttempt; private List<String> dateFormatsToAttempt;
@@ -49,6 +51,10 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
private MetadataContributor month; private MetadataContributor month;
private MetadataContributor year; private MetadataContributor year;
/**
* Set the metadataFieldMapping used when transforming a record to actual metadata
* @param metadataFieldMapping
*/
@Override @Override
public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping) { public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping; this.metadataFieldMapping = metadataFieldMapping;
@@ -56,10 +62,19 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
month.setMetadataFieldMapping(metadataFieldMapping); month.setMetadataFieldMapping(metadataFieldMapping);
year.setMetadataFieldMapping(metadataFieldMapping); year.setMetadataFieldMapping(metadataFieldMapping);
} }
/**
* Initialize an empty PubmedDateMetadatumContributor object
*/
public PubmedDateMetadatumContributor() { public PubmedDateMetadatumContributor() {
} }
/**
*
* @param field {@link org.dspace.importer.external.metadatamapping.MetadataFieldConfig} used in mapping
* @param day a MetadataContributor, representing a day
* @param month a {@link MetadataContributor}, representing a month
* @param year a {@link MetadataContributor}, representing a year
*/
public PubmedDateMetadatumContributor(MetadataFieldConfig field, MetadataContributor day, MetadataContributor month, MetadataContributor year) { public PubmedDateMetadatumContributor(MetadataFieldConfig field, MetadataContributor day, MetadataContributor month, MetadataContributor year) {
this.field = field; this.field = field;
this.day = day; this.day = day;
@@ -67,9 +82,16 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
this.year = year; this.year = year;
} }
/**
* Retrieve the metadata associated with the given object.
* The code will loop over the different dates and attempt to parse them using the configured dateFormatsToAttempt.
* For each date, the first format that parses successfully is used. Make sure that dateFormatsToAttempt is configured from most restrictive to most lenient to get the most precise result.
* @param t A class to retrieve metadata from.
* @return a collection of MetadatumDTO containing the parsed date values
*/
@Override @Override
public Collection<MetadatumDTO> contributeMetadata(T t) { public Collection<MetadatumDTO> contributeMetadata(T t) {
List<MetadatumDTO> values = new LinkedList<MetadatumDTO>(); List<MetadatumDTO> values = new LinkedList<>();
try { try {
@@ -89,18 +111,21 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
dateString = yearList.get(i).getValue(); dateString = yearList.get(i).getValue();
} }
int j = 0 ;
for (String dateFormat : dateFormatsToAttempt) { // Use the first dcDate that has been formatted (Config should go from most specific to most lenient)
while (j<dateFormatsToAttempt.size() && dcDate==null){
String dateFormat = dateFormatsToAttempt.get(j);
try { try {
SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); SimpleDateFormat formatter = new SimpleDateFormat(dateFormat);
Date date = formatter.parse(dateString); Date date = formatter.parse(dateString);
dcDate = new DCDate(date); dcDate = new DCDate(date);
} catch (ParseException e) { } catch (ParseException e) {
log.error(e.getMessage(), e); // Multiple dateformats can be configured, we don't want to print the entire stacktrace every time one of those formats fails.
log.info("Failed parsing "+dateString+" using the following format: "+dateFormat+ ", check the configured dataformats in config/spring/api/pubmed-integration.xml");
} }
j++;
} }
if (dcDate != null) { if (dcDate != null) {
values.add(metadataFieldMapping.toDCValue(field, dcDate.toString())); values.add(metadataFieldMapping.toDCValue(field, dcDate.toString()));
} }
@@ -111,34 +136,66 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
return values; return values;
} }
/**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO
* @return MetadataFieldConfig
*/
public MetadataFieldConfig getField() { public MetadataFieldConfig getField() {
return field; return field;
} }
/**
* Setting the MetadataFieldConfig
* @param field MetadataFieldConfig used while retrieving MetadatumDTO
*/
public void setField(MetadataFieldConfig field) { public void setField(MetadataFieldConfig field) {
this.field = field; this.field = field;
} }
/**
* Retrieve the day from the object
* @return {@link MetadataContributor}, representing a day
*/
public MetadataContributor getDay() { public MetadataContributor getDay() {
return day; return day;
} }
/**
* Set a day ({@link MetadataContributor}) to this object
* @param day a {@link MetadataContributor}, representing a day
*/
public void setDay(MetadataContributor day) { public void setDay(MetadataContributor day) {
this.day = day; this.day = day;
} }
/**
* Retrieve the month from the object
* @return {@link MetadataContributor}, representing a month
*/
public MetadataContributor getMonth() { public MetadataContributor getMonth() {
return month; return month;
} }
/**
* Set a month ({@link MetadataContributor}) to this object
* @param month a {@link MetadataContributor}, representing a month
*/
public void setMonth(MetadataContributor month) { public void setMonth(MetadataContributor month) {
this.month = month; this.month = month;
} }
/**
* Retrieve the year from the object
* @return {@link MetadataContributor}, representing a year
*/
public MetadataContributor getYear() { public MetadataContributor getYear() {
return year; return year;
} }
/**
* Set a year ({@link MetadataContributor}) to this object
* @param year a {@link MetadataContributor}, representing a year
*/
public void setYear(MetadataContributor year) { public void setYear(MetadataContributor year) {
this.year = year; this.year = year;
} }
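A self-contained sketch of the "first successful format wins" loop described above, with hypothetical formats and input; the real implementation wraps the parsed Date in a DCDate and reads the formats from the Spring configuration.
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

public class DateFormatFallbackExample {
    public static void main(String[] args) {
        // Most specific format first, more lenient formats afterwards.
        List<String> dateFormatsToAttempt = Arrays.asList("yyyy-MM-dd", "yyyy-MM", "yyyy");
        String dateString = "2015-07";
        Date parsed = null;
        int j = 0;
        while (j < dateFormatsToAttempt.size() && parsed == null) {
            try {
                parsed = new SimpleDateFormat(dateFormatsToAttempt.get(j)).parse(dateString);
            } catch (ParseException e) {
                // Expected for the more specific formats; fall through to the next one.
            }
            j++;
        }
        System.out.println(parsed); // parsed by "yyyy-MM", the first format that matches
    }
}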

View File

@@ -6,7 +6,7 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.importer.external.pubmed.metadatamapping; package org.dspace.importer.external.pubmed.metadatamapping.contributor;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
@@ -16,9 +16,10 @@ import org.dspace.importer.external.metadatamapping.contributor.MetadataContribu
import java.util.*; import java.util.*;
/** /**
* Created by Philip Vissenaekens (philip at atmire dot com) * Pubmed specific implementation of {@link MetadataContributor}
* Date: 07/07/15 * Responsible for generating a set of Language metadata from the retrieved document.
* Time: 15:08 *
* @author Philip Vissenaekens (philip at atmire dot com)
*/ */
public class PubmedLanguageMetadatumContributor<T> implements MetadataContributor<T> { public class PubmedLanguageMetadatumContributor<T> implements MetadataContributor<T> {
Logger log = Logger.getLogger(PubmedDateMetadatumContributor.class); Logger log = Logger.getLogger(PubmedDateMetadatumContributor.class);
@@ -29,25 +30,42 @@ public class PubmedLanguageMetadatumContributor<T> implements MetadataContributo
private MetadataFieldConfig field; private MetadataFieldConfig field;
private MetadataContributor language; private MetadataContributor language;
/**
* Initialize PubmedLanguageMetadatumContributor and create the iso3toiso2 mapping used in the transforming of language codes
*/
public PubmedLanguageMetadatumContributor() { public PubmedLanguageMetadatumContributor() {
iso3toIso2=new HashMap<>(); iso3toIso2=new HashMap<>();
// Populate the languageMap with the mapping between iso3 and iso2 language codes
for (Locale locale : Locale.getAvailableLocales()) { for (Locale locale : Locale.getAvailableLocales()) {
iso3toIso2.put(locale.getISO3Language(),locale.getLanguage()); iso3toIso2.put(locale.getISO3Language(),locale.getLanguage());
} }
} }
/**
* Initialize the PubmedLanguageMetadatumContributor class using a {@link org.dspace.importer.external.metadatamapping.MetadataFieldConfig} and a language {@link org.dspace.importer.external.metadatamapping.contributor.MetadataContributor}
* @param field {@link org.dspace.importer.external.metadatamapping.MetadataFieldConfig} used in mapping
* @param language
*/
public PubmedLanguageMetadatumContributor(MetadataFieldConfig field, MetadataContributor language) { public PubmedLanguageMetadatumContributor(MetadataFieldConfig field, MetadataContributor language) {
this(); this();
this.field = field; this.field = field;
this.language = language; this.language = language;
} }
/**
* Set the metadataFieldMapping used when transforming a record to actual metadata
* @param metadataFieldMapping
*/
@Override @Override
public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping) { public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping; this.metadataFieldMapping = metadataFieldMapping;
language.setMetadataFieldMapping(metadataFieldMapping); language.setMetadataFieldMapping(metadataFieldMapping);
} }
/**
* @param t A class to retrieve metadata from.
* @return a collection of MetadatumDTO containing the mapped iso2 language codes
*/
@Override @Override
public Collection<MetadatumDTO> contributeMetadata(T t) { public Collection<MetadatumDTO> contributeMetadata(T t) {
List<MetadatumDTO> values=new LinkedList<MetadatumDTO>(); List<MetadatumDTO> values=new LinkedList<MetadatumDTO>();
@@ -56,7 +74,7 @@ public class PubmedLanguageMetadatumContributor<T> implements MetadataContributo
LinkedList<MetadatumDTO> languageList = (LinkedList<MetadatumDTO>) language.contributeMetadata(t); LinkedList<MetadatumDTO> languageList = (LinkedList<MetadatumDTO>) language.contributeMetadata(t);
for (MetadatumDTO metadatum : languageList) { for (MetadatumDTO metadatum : languageList) {
// Add the iso2 language code corresponding to the retrieved iso3 code to the metadata
values.add(metadataFieldMapping.toDCValue(field, iso3toIso2.get(metadatum.getValue().toLowerCase()))); values.add(metadataFieldMapping.toDCValue(field, iso3toIso2.get(metadatum.getValue().toLowerCase())));
} }
} catch (Exception e) { } catch (Exception e) {
@@ -66,18 +84,34 @@ public class PubmedLanguageMetadatumContributor<T> implements MetadataContributo
return values; return values;
} }
/**
* Return the MetadataContributor used while retrieving MetadatumDTO
* @return MetadataContributor
*/
public MetadataContributor getLanguage() { public MetadataContributor getLanguage() {
return language; return language;
} }
/**
* Setting the MetadataContributor
* @param language MetadataContributor used while retrieving MetadatumDTO
*/
public void setLanguage(MetadataContributor language) { public void setLanguage(MetadataContributor language) {
this.language = language; this.language = language;
} }
/**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO
* @return MetadataFieldConfig
*/
public MetadataFieldConfig getField() { public MetadataFieldConfig getField() {
return field; return field;
} }
/**
* Setting the MetadataFieldConfig
* @param field MetadataFieldConfig used while retrieving MetadatumDTO
*/
public void setField(MetadataFieldConfig field) { public void setField(MetadataFieldConfig field) {
this.field = field; this.field = field;
} }

View File

@@ -0,0 +1,15 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Pubmed implementation of specific {@link org.dspace.importer.external.metadatamapping.contributor} classes,
* defining the mapping between the retrieved record and dspace metadata
* @author Roeland Dillen (roeland at atmire dot com)
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.pubmed.metadatamapping.contributor;

View File

@@ -0,0 +1,14 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Pubmed implementation of {@link org.dspace.importer.external.metadatamapping.contributor.MetadataContributor},
* Responsible for generating a set of specific metadata from the retrieved document.
* @author Roeland Dillen (roeland at atmire dot com)
*/
package org.dspace.importer.external.pubmed.metadatamapping;

View File

@@ -6,26 +6,40 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.importer.external.pubmed.metadatamapping.service; package org.dspace.importer.external.pubmed.metadatamapping.transform;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.importer.external.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.Query; import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.metadatamapping.service.GenerateQueryService; import org.dspace.importer.external.metadatamapping.transform.GenerateQueryService;
import java.util.List; import java.util.List;
/** /**
* Created by jonas - jonas@atmire.com on 06/11/15. * This class is an implementation of {@link GenerateQueryService}
* Represents a service that generates the pubmed query which is used to retrieve the records.
* This is based on a given item.
*
* @author Jonas - (jonas at atmire dot com)
*/ */
public class GeneratePubmedQueryService implements GenerateQueryService { public class GeneratePubmedQueryService implements GenerateQueryService {
/**
* Create a Query object based on a given item.
* If the item has at least 1 value for dc.identifier.doi, the first one will be used.
* If no DOI is found, the title will be used.
* When no DOI or title is found, null is returned instead.
* @param item the Item to create a Query from
*/
@Override @Override
public Query generateQueryForItem(Item item) throws MetadataSourceException { public Query generateQueryForItem(Item item) throws MetadataSourceException {
Query query = new Query(); Query query = new Query();
// Retrieve an instance of the ItemService to access business calls on an item.
ItemService itemService = ContentServiceFactory.getInstance().getItemService(); ItemService itemService = ContentServiceFactory.getInstance().getItemService();
List<MetadataValue> doi = itemService.getMetadata(item, "dc", "identifier", "doi", Item.ANY); List<MetadataValue> doi = itemService.getMetadata(item, "dc", "identifier", "doi", Item.ANY);
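A hedged sketch of how the remainder of generateQueryForItem could implement the DOI-first, title-fallback behaviour described in the javadoc above; Query#addParameter and the "term" key are assumptions about how the query object is populated.
if (!doi.isEmpty()) {
    query.addParameter("term", doi.get(0).getValue());   // the first DOI wins
    return query;
}
List<MetadataValue> title = itemService.getMetadata(item, "dc", "title", null, Item.ANY);
if (!title.isEmpty()) {
    query.addParameter("term", title.get(0).getValue()); // fall back to the first title
    return query;
}
return null;                                              // nothing usable on the item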

View File

@@ -0,0 +1,13 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Pubmed implementation of specific {@link org.dspace.importer.external.metadatamapping.transform} classes
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.pubmed.metadatamapping.transform;

View File

@@ -0,0 +1,14 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Pubmed implementation of {@link org.dspace.importer.external.service.components.MetadataSource}
* Based on metadata support in {@link org.dspace.importer.external.metadatamapping}
* @author Roeland Dillen (roeland at atmire dot com)
*/
package org.dspace.importer.external.pubmed;

View File

@@ -6,16 +6,17 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.importer.external.pubmed.service.other; package org.dspace.importer.external.pubmed.service;
import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMXMLBuilderFactory; import org.apache.axiom.om.OMXMLBuilderFactory;
import org.apache.axiom.om.OMXMLParserWrapper; import org.apache.axiom.om.OMXMLParserWrapper;
import org.apache.axiom.om.xpath.AXIOMXPath; import org.apache.axiom.om.xpath.AXIOMXPath;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.Query; import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.jaxen.JaxenException; import org.jaxen.JaxenException;
import javax.ws.rs.client.Client; import javax.ws.rs.client.Client;
@@ -31,69 +32,135 @@ import java.util.List;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
/** /**
* Created by jonas - jonas@atmire.com on 06/11/15. * Implements a data source for querying pubmed central
*
* @author Roeland Dillen (roeland at atmire dot com)
*/ */
public class ImportMetadataSourceServiceImpl extends org.dspace.importer.external.service.AbstractImportMetadataSourceService { public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement> {
private String baseAddress; private String baseAddress;
private WebTarget pubmedWebTarget; private WebTarget pubmedWebTarget;
/** Find the number of records matching a query;
*
* @param query a query string to base the search on.
* @return the sum of the matching records over this import source
* @throws MetadataSourceException
*/
@Override @Override
public int getNbRecords(String query) throws MetadataSourceException { public int getNbRecords(String query) throws MetadataSourceException {
return retry(new GetNbRecords(query)); return retry(new GetNbRecords(query));
} }
@Override /** Find the number of records matching a query;
*
* @param query a query object to base the search on.
* @return the sum of the matching records over this import source
* @throws MetadataSourceException
*/ @Override
public int getNbRecords(Query query) throws MetadataSourceException { public int getNbRecords(Query query) throws MetadataSourceException {
return retry(new GetNbRecords(query)); return retry(new GetNbRecords(query));
} }
/** Find records matching a string query. Supports pagination
*
* @param query a query string to base the search on.
* @param start offset to start at
* @param count number of records to retrieve.
* @return a set of records. Fully transformed.
* @throws MetadataSourceException
*/
@Override @Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException { public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new GetRecords(query, start, count)); return retry(new GetRecords(query, start, count));
} }
/** Find records based on an object query.
*
* @param query a query object to base the search on.
* @return a set of records. Fully transformed.
* @throws MetadataSourceException
*/
@Override @Override
public Collection<ImportRecord> getRecords(Query q) throws MetadataSourceException { public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new GetRecords(q)); return retry(new GetRecords(query));
} }
/** Get a single record from the source.
* The first match will be returned
* @param id identifier for the record
* @return a matching record
* @throws MetadataSourceException
*/
@Override @Override
public ImportRecord getRecord(String id) throws MetadataSourceException { public ImportRecord getRecord(String id) throws MetadataSourceException {
return retry(new GetRecord(id)); return retry(new GetRecord(id));
} }
/** Get a single record from the source.
* The first match will be returned
* @param query a query matching a single record
* @return a matching record
* @throws MetadataSourceException
*/
@Override @Override
public ImportRecord getRecord(Query q) throws MetadataSourceException { public ImportRecord getRecord(Query query) throws MetadataSourceException {
return retry(new GetRecord(q)); return retry(new GetRecord(query));
} }
/**
* The string that identifies this import implementation. Preferable a URI
* @return the identifying uri
*/
@Override @Override
public String getImportSource() { public String getImportSource() {
return baseAddress; return "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/";
} }
/** Finds records based on an item
* @param item an item to base the search on
* @return a collection of import records. Only the identifier of the found records may be put in the record.
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override @Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException { public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
return retry(new FindMatchingRecords(item)); return retry(new FindMatchingRecords(item));
} }
/** Finds records based on a query object.
* @param query a query object to base the search on.
* @return a collection of import records. Only the identifier of the found records may be put in the record.
* @throws MetadataSourceException
*/
@Override @Override
public Collection<ImportRecord> findMatchingRecords(Query q) throws MetadataSourceException { public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
return retry(new FindMatchingRecords(q)); return retry(new FindMatchingRecords(query));
} }
/**
* Initialize the class
* @throws Exception
*/
@Override @Override
public void init() throws Exception { public void init() throws Exception {
Client client = ClientBuilder.newClient(); Client client = ClientBuilder.newClient();
WebTarget webTarget = client.target(baseAddress); WebTarget webTarget = client.target(baseAddress);
pubmedWebTarget = webTarget.queryParam("db", "pubmed"); pubmedWebTarget = webTarget.queryParam("db", "pubmed");
} }
/**
* Return the baseAddress set to this object
* @return The String object that represents the baseAddress of this object
*/
public String getBaseAddress() { public String getBaseAddress() {
return baseAddress; return baseAddress;
} }
/**
* Set the baseAddress to this object
* @param baseAddress The String object that represents the baseAddress of this object
*/
public void setBaseAddress(String baseAddress) { public void setBaseAddress(String baseAddress) {
this.baseAddress = baseAddress; this.baseAddress = baseAddress;
} }
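A hedged usage sketch: matching an existing Item against the pubmed source through the ImportService (shown further below), addressing the implementation by the uri returned from getImportSource() above; passing "*" instead would aggregate over every configured MetadataSource.
public Collection<ImportRecord> matchAgainstPubmed(ImportService importService, Item item)
        throws MetadataSourceException {
    return importService.findMatchingRecords("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/", item);
}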

View File

@@ -0,0 +1,13 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Pubmed implementation of specific {@link org.dspace.importer.external.service} classes
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
package org.dspace.importer.external.pubmed.service;

View File

@@ -10,44 +10,68 @@ package org.dspace.importer.external.service;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
import org.dspace.importer.external.metadatamapping.service.GenerateQueryService; import org.dspace.importer.external.service.components.MetadataSource;
import org.dspace.importer.external.service.other.Imports; import org.dspace.importer.external.service.components.AbstractRemoteMetadataSource;
import org.dspace.importer.external.service.other.MetadataSource; import org.dspace.importer.external.metadatamapping.transform.GenerateQueryService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required; import org.springframework.beans.factory.annotation.Required;
import java.util.LinkedList; import java.util.LinkedList;
/** /**
* Created by: Roeland Dillen (roeland at atmire dot com) * This class is a partial implementation of {@link MetadataSource}. It provides assistance with mapping metadata from source format to DSpace format.
* Date: 29 May 2015 * AbstractImportMetadataSourceService has a generic type 'RecordType'.
* In the importer implementation this type should be the class of the records received from the remote source's response.
*
* @author Roeland Dillen (roeland at atmire dot com)
*
*/ */
public abstract class AbstractImportMetadataSourceService<RecordType> extends MetadataSource implements Imports { public abstract class AbstractImportMetadataSourceService<RecordType> extends AbstractRemoteMetadataSource implements MetadataSource {
private GenerateQueryService generateQueryForItem = null; private GenerateQueryService generateQueryForItem = null;
private MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> metadataFieldMapping; private MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> metadataFieldMapping;
/**
* Retrieve the {@link GenerateQueryService}
* @return A GenerateQueryService object set to this class
*/
public GenerateQueryService getGenerateQueryForItem() { public GenerateQueryService getGenerateQueryForItem() {
return generateQueryForItem; return generateQueryForItem;
} }
@Autowired /**
* Set the {@link GenerateQueryService} used to create a {@link org.dspace.importer.external.datamodel.Query} for a DSpace {@link org.dspace.content.Item}
* @param generateQueryForItem
*/
@Autowired
public void setGenerateQueryForItem(GenerateQueryService generateQueryForItem) { public void setGenerateQueryForItem(GenerateQueryService generateQueryForItem) {
this.generateQueryForItem = generateQueryForItem; this.generateQueryForItem = generateQueryForItem;
} }
/**
* Retrieve the MetadataFieldMapping containing the mapping between RecordType and Metadata
* @return The configured MetadataFieldMapping
*/
public MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> getMetadataFieldMapping() { public MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> getMetadataFieldMapping() {
return metadataFieldMapping; return metadataFieldMapping;
} }
/**
* Sets the MetadataFieldMapping used to map a RecordType onto DSpace metadata
* @param metadataFieldMapping
*/
@Required @Required
public void setMetadataFieldMapping( public void setMetadataFieldMapping(
MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> metadataFieldMapping) { MetadataFieldMapping<RecordType, MetadataContributor<RecordType>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping; this.metadataFieldMapping = metadataFieldMapping;
} }
public ImportRecord transformSourceRecords(RecordType rt){ /**
return new ImportRecord(new LinkedList<MetadatumDTO>(getMetadataFieldMapping().resultToDCValueMapping(rt))); * Return an ImportRecord constructed from the results in a RecordType
* @param recordType The recordtype to retrieve the DCValueMapping from
* @return An {@link ImportRecord}, This is based on the results retrieved from the recordTypeMapping
*/
public ImportRecord transformSourceRecords(RecordType recordType){
return new ImportRecord(new LinkedList<>(getMetadataFieldMapping().resultToDCValueMapping(recordType)));
} }
} }

View File

@@ -10,62 +10,86 @@ package org.dspace.importer.external.service;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.Query; import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.service.other.Destroyable; import org.dspace.importer.external.service.components.Destroyable;
import org.dspace.importer.external.service.other.Imports; import org.dspace.importer.external.service.components.MetadataSource;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import java.util.*; import java.util.*;
/** /** Main entry point for the import framework.
* Created by Roeland Dillen (roeland at atmire dot com) * Instead of calling the different importer implementations directly, the ImportService should be called.
* Date: 17/09/12 * This class contains the same methods as the other implementations, but has an extra parameter URL.
* Time: 14:19 * This URL should be the same identifier that is returned by the "getImportSource" method that is defined in the importer implementation you want to use.
* @author Roeland Dillen (roeland at atmire dot com)
*/ */
public class ImportService implements Destroyable { public class ImportService implements Destroyable {
private HashMap<String, Imports> importSources = new HashMap<String, Imports>(); private HashMap<String, MetadataSource> importSources = new HashMap<>();
Logger log = Logger.getLogger(ImportService.class); Logger log = Logger.getLogger(ImportService.class);
/**
* Constructs an empty ImportService class object
*/
public ImportService() { public ImportService() {
} }
protected static final String ANY = "*"; protected static final String ANY = "*";
/**
* Sets the import sources to which the retrieving and matching of records will be delegated
* @param importSources A list of {@link MetadataSource} to set to this service
* @throws MetadataSourceException
*/
@Autowired(required = false) @Autowired(required = false)
public void setImportSources(List<Imports> importSources) throws MetadataSourceException { public void setImportSources(List<MetadataSource> importSources) throws MetadataSourceException {
log.info("Loading " + importSources.size() + " import sources."); log.info("Loading " + importSources.size() + " import sources.");
for (Imports imports : importSources) { for (MetadataSource metadataSource : importSources) {
this.importSources.put(imports.getImportSource(), imports); this.importSources.put(metadataSource.getImportSource(), metadataSource);
} }
} }
protected Map<String, Imports> getImportSources() { /**
* Retrieve the importSources set to this class.
* @return An unmodifiableMap of importSources
*/
protected Map<String, MetadataSource> getImportSources() {
return Collections.unmodifiableMap(importSources); return Collections.unmodifiableMap(importSources);
} }
protected Collection<Imports> matchingImports(String url) { /**
if (ANY.equals(url)) { * Utility method to find what import implementations match the imports uri.
* @param uri the identifier of the import implementation or * for all
* @return matching MetadataSource implementations
*/
protected Collection<MetadataSource> matchingImports(String uri) {
if (ANY.equals(uri)) {
return importSources.values(); return importSources.values();
} else { } else {
if(importSources.containsKey(url)) if(importSources.containsKey(uri))
return Collections.singletonList(importSources.get(url)); return Collections.singletonList(importSources.get(uri));
else else
return Collections.emptyList(); return Collections.emptyList();
} }
} }
/** Finds records based on an item
public Collection<ImportRecord> findMatchingRecords(String url, Item item) throws MetadataSourceException { * Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
* @param uri the identifier of the import implementation or * for all
* @param item an item to base the search on
* @return a collection of import records. Only the identifier of the found records may be put in the record.
* @throws MetadataSourceException if the underlying imports throw any exception.
*/
public Collection<ImportRecord> findMatchingRecords(String uri, Item item) throws MetadataSourceException {
try { try {
List<ImportRecord> recordList = new LinkedList<ImportRecord>(); List<ImportRecord> recordList = new LinkedList<ImportRecord>();
for (Imports imports : matchingImports(url)) { for (MetadataSource metadataSource : matchingImports(uri)) {
recordList.addAll(imports.findMatchingRecords(item)); recordList.addAll(metadataSource.findMatchingRecords(item));
} }
return recordList; return recordList;
@@ -74,11 +98,18 @@ public class ImportService implements Destroyable {
} }
} }
public Collection<ImportRecord> findMatchingRecords(String url, Query query) throws MetadataSourceException { /** Finds records based on a query object.
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
* @param uri the identifier of the import implementation or * for all
* @param query a query object to base the search on. The implementation decides how the query is interpreted.
* @return a collection of import records. Only the identifier of the found records may be put in the record.
* @throws MetadataSourceException
*/
public Collection<ImportRecord> findMatchingRecords(String uri, Query query) throws MetadataSourceException {
try { try {
List<ImportRecord> recordList = new LinkedList<ImportRecord>(); List<ImportRecord> recordList = new LinkedList<ImportRecord>();
for (Imports imports : matchingImports(url)) { for (MetadataSource metadataSource : matchingImports(uri)) {
recordList.addAll(imports.findMatchingRecords(query)); recordList.addAll(metadataSource.findMatchingRecords(query));
} }
return recordList; return recordList;
@@ -87,11 +118,36 @@ public class ImportService implements Destroyable {
} }
} }
public int getNbRecords(String url, String query) throws MetadataSourceException { /** Find the number of records matching a string query.
*
* @param uri the identifier of the import implementation or * for all
* @param query a query to base the search on
* @return the sum of the matching records over all import sources
* @throws MetadataSourceException
*/
public int getNbRecords(String uri, String query) throws MetadataSourceException {
try { try {
int total = 0; int total = 0;
for (Imports Imports : matchingImports(url)) { for (MetadataSource MetadataSource : matchingImports(uri)) {
total += Imports.getNbRecords(query); total += MetadataSource.getNbRecords(query);
}
return total;
} catch (Exception e) {
throw new MetadataSourceException(e);
}
}
/** Find the number of records matching an object query.
*
* @param uri the identifier of the import implementation or * for all
* @param query a query object to base the search on. The implementation decides how the query is interpreted.
* @return the sum of the matching records over all import sources
* @throws MetadataSourceException
*/
public int getNbRecords(String uri, Query query) throws MetadataSourceException {
try {
int total = 0;
for (MetadataSource MetadataSource : matchingImports(uri)) {
total += MetadataSource.getNbRecords(query);
} }
return total; return total;
} catch (Exception e) { } catch (Exception e) {
@@ -99,35 +155,20 @@ public class ImportService implements Destroyable {
} }
} }
public int getNbRecords(String url, Query query) throws MetadataSourceException { /** Find records matching a string query. Supports pagination.
*
* @param uri the identifier of the import implementation or * for all
* @param query a query, in string format, to base the search on. The query will generally be posted 'as is' to the source.
* @param start offset to start at
* @param count number of records to retrieve.
* @return a set of records. Fully transformed.
* @throws MetadataSourceException
*/
public Collection<ImportRecord> getRecords(String uri, String query, int start, int count) throws MetadataSourceException {
try { try {
int total = 0; List<ImportRecord> recordList = new LinkedList<>();
for (Imports Imports : matchingImports(url)) { for (MetadataSource metadataSource : matchingImports(uri)) {
total += Imports.getNbRecords(query); recordList.addAll(metadataSource.getRecords(query, start, count));
}
return total;
} catch (Exception e) {
throw new MetadataSourceException(e);
}
}
public Collection<ImportRecord> getRecords(String url, String query, int start, int count) throws MetadataSourceException {
try {
List<ImportRecord> recordList = new LinkedList<ImportRecord>();
for (Imports imports : matchingImports(url)) {
recordList.addAll(imports.getRecords(query, start, count));
}
return recordList;
} catch (Exception e) {
throw new MetadataSourceException(e);
}
}
public Collection<ImportRecord> getRecords(String url, Query query) throws MetadataSourceException {
try {
List<ImportRecord> recordList = new LinkedList<ImportRecord>();
for (Imports imports : matchingImports(url)) {
recordList.addAll(imports.getRecords(query));
} }
return recordList; return recordList;
} catch (Exception e) { } catch (Exception e) {
@@ -135,23 +176,54 @@ public class ImportService implements Destroyable {
} }
} }
/** Find records matching an object query.
public ImportRecord getRecord(String url, String id) throws MetadataSourceException { *
* @param uri the identifier of the import implementation or * for all
* @param query a query object to base the search on. The implementation decides how the query is interpreted.
* @return a set of records. Fully transformed.
* @throws MetadataSourceException
*/
public Collection<ImportRecord> getRecords(String uri, Query query) throws MetadataSourceException {
try { try {
for (Imports imports : matchingImports(url)) { List<ImportRecord> recordList = new LinkedList<>();
if (imports.getRecord(id) != null) return imports.getRecord(id); for (MetadataSource metadataSource : matchingImports(uri)) {
recordList.addAll(metadataSource.getRecords(query));
} }
return null; return recordList;
} catch (Exception e) { } catch (Exception e) {
throw new MetadataSourceException(e); throw new MetadataSourceException(e);
} }
} }
public ImportRecord getRecord(String url, Query query) throws MetadataSourceException { /** Get a single record from a source.
try { * The first match will be returned
for (Imports imports : matchingImports(url)) { * @param uri the identifier of the import implementation or * for all
if (imports.getRecord(query) != null) return imports.getRecord(query); * @param id identifier for the record
* @return a matching record
* @throws MetadataSourceException
*/
public ImportRecord getRecord(String uri, String id) throws MetadataSourceException {
try {
for (MetadataSource metadataSource : matchingImports(uri)) {
if (metadataSource.getRecord(id) != null) return metadataSource.getRecord(id);
}
return null;
} catch (Exception e) {
throw new MetadataSourceException(e);
}
}
/** Get a single record from the source.
* The first match will be returned
* @param uri the identifier of the import implementation or * for all
* @param query a query matching a single record
* @return a matching record
* @throws MetadataSourceException
*/
public ImportRecord getRecord(String uri, Query query) throws MetadataSourceException {
try {
for (MetadataSource metadataSource : matchingImports(uri)) {
if (metadataSource.getRecord(query) != null) return metadataSource.getRecord(query);
} }
return null; return null;
@@ -160,15 +232,19 @@ public class ImportService implements Destroyable {
} }
} }
/** Retrieve the importUrls that are set on the importSources.
* @return a Collection of strings representing the configured importUrls
*/
public Collection<String> getImportUrls() { public Collection<String> getImportUrls() {
return importSources.keySet(); return importSources.keySet();
} }
/** Call destroy on all {@link Destroyable} {@link MetadataSource} objects set in this ImportService
*/
@Override @Override
public void destroy() throws Exception { public void destroy() throws Exception {
for (Imports imports : importSources.values()) { for (MetadataSource metadataSource : importSources.values()) {
if (imports instanceof Destroyable) ((Destroyable) imports).destroy(); if (metadataSource instanceof Destroyable) ((Destroyable) metadataSource).destroy();
} }
} }
} }
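For illustration, here is a minimal usage sketch of the aggregating service above. The injected bean, the "pubmed" source URI, the example query and the page size are assumptions made for this sketch, not values defined by this changeset; only the getNbRecords and getRecords signatures shown in the diff are relied on.

import java.util.Collection;

import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.service.ImportService;

public class ImportServiceUsageSketch {

    // importService is assumed to be the Spring-wired bean described above.
    public void pageThroughResults(ImportService importService) throws MetadataSourceException {
        String sourceUri = "pubmed";        // must match some MetadataSource.getImportSource() (assumption)
        String query = "dc.title:climate";  // arbitrary example query (assumption)

        // Count the matches first, then page through them 10 records at a time.
        int total = importService.getNbRecords(sourceUri, query);
        for (int offset = 0; offset < total; offset += 10) {
            Collection<ImportRecord> page = importService.getRecords(sourceUri, query, offset, 10);
            // each ImportRecord has already been fully transformed by the matching source
        }

        // Passing "*" as the uri aggregates results over every configured import source.
    }
}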

View File

@@ -6,11 +6,11 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.importer.external.service.other; package org.dspace.importer.external.service.components;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.importer.external.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.SourceExceptionHandler; import org.dspace.importer.external.exception.SourceExceptionHandler;
import javax.annotation.Resource; import javax.annotation.Resource;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
@@ -21,10 +21,14 @@ import java.util.concurrent.Callable;
import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantLock;
/** /**
* Created by: Antoine Snyers (antoine at atmire dot com) * This class contains primitives to handle request timeouts and to retry requests.
* Date: 27 Oct 2014 * This is achieved by classifying exceptions as fatal or as non fatal/retryable.
* Evidently only subclasses can make the proper determination of what is retryable and what isn't.
* This is useful in case the service employs throttling and to deal with general network issues.
* @author Roeland Dillen (roeland at atmire dot com)
* @author Antoine Snyers (antoine at atmire dot com)
*/ */
public abstract class MetadataSource { public abstract class AbstractRemoteMetadataSource {
protected long lastRequest = 0; protected long lastRequest = 0;
protected long interRequestTime; protected long interRequestTime;
@@ -39,45 +43,82 @@ public abstract class MetadataSource {
protected Map<Class, List<SourceExceptionHandler>> exceptionHandlersMap; protected Map<Class, List<SourceExceptionHandler>> exceptionHandlersMap;
protected Exception error; protected Exception error;
/**
protected MetadataSource() { * Constructs an empty AbstractRemoteMetadataSource object and initializes the exception handlers
*/
protected AbstractRemoteMetadataSource() {
initExceptionHandlers(); initExceptionHandlers();
} }
/**
* Initialize the exceptionHandlersMap with an empty {@link java.util.LinkedHashMap}
*/
protected void initExceptionHandlers() { protected void initExceptionHandlers() {
exceptionHandlersMap = new LinkedHashMap<Class, List<SourceExceptionHandler>>(); exceptionHandlersMap = new LinkedHashMap<>();
// if an exception is thrown that is not in there, it is not recoverable and the retry chain will stop // if an exception is thrown that is not in there, it is not recoverable and the retry chain will stop
// by default all exceptions are fatal, but subclasses can add their own handlers for their own exceptions // by default all exceptions are fatal, but subclasses can add their own handlers for their own exceptions
} }
/**
* Return the warning message used for logging during exception catching
* @return a "warning" String
*/
public String getWarning() { public String getWarning() {
return warning; return warning;
} }
/**
* Set the warning message used for logging
* @param warning
*/
public void setWarning(String warning) { public void setWarning(String warning) {
this.warning = warning; this.warning = warning;
} }
/**
* Return the number of retries that have currently been undertaken
* @return the number of retries
*/
public int getRetry() { public int getRetry() {
return retry; return retry;
} }
/**
* Return the maximum number of retries that can be undertaken before the exception is rethrown
* @return the number of maximum retries
*/
public int getMaxRetry() { public int getMaxRetry() {
return maxRetry; return maxRetry;
} }
/**
* Set the maximum number of retries before the exception is rethrown
* @param maxRetry
*/
@Resource(name="maxRetry") @Resource(name="maxRetry")
public void setMaxRetry(int maxRetry) { public void setMaxRetry(int maxRetry) {
this.maxRetry = maxRetry; this.maxRetry = maxRetry;
} }
/**
* Retrieve the operationId
* @return A randomly generated UUID, generated during the retry method
*/
public String getOperationId() { public String getOperationId() {
return operationId; return operationId;
} }
/**
* Retrieve the last encountered exception
* @return An Exception object, the last one encountered in the retry method
*/
public Exception getError() { public Exception getError() {
return error; return error;
} }
/**
* Set the last encountered error
* @param error
*/
public void setError(Exception error) { public void setError(Exception error) {
this.error = error; this.error = error;
} }
@@ -85,7 +126,7 @@ public abstract class MetadataSource {
/** /**
* log4j logger * log4j logger
*/ */
private static Logger log = Logger.getLogger(MetadataSource.class); private static Logger log = Logger.getLogger(AbstractRemoteMetadataSource.class);
/** /**
* Command pattern implementation. the callable.call method will be retried * Command pattern implementation. the callable.call method will be retried
@@ -96,7 +137,7 @@ public abstract class MetadataSource {
* the public methods of this class. * the public methods of this class.
* @param <T> return type. Generics for type safety. * @param <T> return type. Generics for type safety.
* @return The result of the call * @return The result of the call
* @throws com.atmire.import_citations.configuration.SourceException if something unrecoverable happens (e.g. network failures) * @throws org.dspace.importer.external.exception.MetadataSourceException if something unrecoverable happens (e.g. network failures)
*/ */
protected <T> T retry(Callable<T> callable) throws MetadataSourceException { protected <T> T retry(Callable<T> callable) throws MetadataSourceException {
@@ -144,34 +185,53 @@ public abstract class MetadataSource {
} }
protected void handleException(int retry, Exception e, String operationId) throws MetadataSourceException { /**
* Handles a given exception, or wraps and rethrows it as a {@link org.dspace.importer.external.exception.MetadataSourceException} if no ExceptionHandler is configured for it
* @param retry The number of retries performed before the exception was thrown
* @param exception The exception to handle
* @param operationId The id of the operation that threw the exception
* @throws MetadataSourceException if no ExceptionHandler is configured for the given exception
*/
protected void handleException(int retry, Exception exception, String operationId) throws MetadataSourceException {
List<SourceExceptionHandler> exceptionHandlers = getExceptionHandler(e); List<SourceExceptionHandler> exceptionHandlers = getExceptionHandler(exception);
if (exceptionHandlers != null && !exceptionHandlers.isEmpty()) { if (exceptionHandlers != null && !exceptionHandlers.isEmpty()) {
for (SourceExceptionHandler exceptionHandler : exceptionHandlers) { for (SourceExceptionHandler exceptionHandler : exceptionHandlers) {
exceptionHandler.handle(this); exceptionHandler.handle(this);
} }
}else{ }else{
throwSourceException(retry, e, operationId); throwSourceException(retry, exception, operationId);
} }
} }
protected List<SourceExceptionHandler> getExceptionHandler(Exception e) { /** Retrieve the list of SourceExceptionHandler objects configured for the class of the given exception.
* @param exception The exception to base the retrieval of {@link org.dspace.importer.external.exception.SourceExceptionHandler} on
* @return a list of {@link org.dspace.importer.external.exception.SourceExceptionHandler} objects
*/
protected List<SourceExceptionHandler> getExceptionHandler(Exception exception) {
for (Class aClass : exceptionHandlersMap.keySet()) { for (Class aClass : exceptionHandlersMap.keySet()) {
if (aClass.isInstance(e)) { if (aClass.isInstance(exception)) {
return exceptionHandlersMap.get(aClass); return exceptionHandlersMap.get(aClass);
} }
} }
return null; return null;
} }
protected void throwSourceException(int retry, Exception e, String operationId) throws MetadataSourceException { /** Throw a {@link MetadataSourceException}
* @param retry The number of retries performed before the exception was thrown
* @param exception The exception to throw
* @param operationId The id of the operation that threw the exception
* @throws MetadataSourceException
*/
protected void throwSourceException(int retry, Exception exception, String operationId) throws MetadataSourceException {
throwSourceExceptionHook(); throwSourceExceptionHook();
// log.error("Source exception", e); log.error("Source exception " + exception.getMessage(),exception);
log.error("Source exception " + e.getMessage()); throw new MetadataSourceException("At retry of operation " + operationId + " " + retry, exception);
throw new MetadataSourceException("At retry of operation " + operationId + " " + retry, e);
} }
/**
* An extension point where subclasses can run additional logic or callbacks before the source exception is thrown
*/
protected void throwSourceExceptionHook() { protected void throwSourceExceptionHook() {
} }
@@ -179,7 +239,7 @@ public abstract class MetadataSource {
/** /**
* Attempts to init a session * Attempts to init a session
* *
* @throws Exception if error * @throws Exception
*/ */
public abstract void init() throws Exception; public abstract void init() throws Exception;
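As a rough sketch of how a concrete source might use the retry primitive documented above: the subclass name, the countHits method and the stand-in request body are invented for illustration; only retry(Callable) and the abstract init() visible in this hunk are taken from the class, and any further abstract members elided by the diff would also need to be implemented.

import java.util.concurrent.Callable;

import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.service.components.AbstractRemoteMetadataSource;

public class ExampleRemoteSource extends AbstractRemoteMetadataSource {

    @Override
    public void init() throws Exception {
        // open HTTP clients, read configuration, etc. (left empty in this sketch)
    }

    public int countHits(final String query) throws MetadataSourceException {
        // retry(...) enforces the inter-request delay and re-runs call() when a registered
        // SourceExceptionHandler classifies the thrown exception as recoverable; anything
        // unhandled is wrapped in a MetadataSourceException and rethrown.
        return retry(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return query.length(); // stand-in for the actual remote request (assumption)
            }
        });
    }
}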

View File

@@ -6,13 +6,16 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.importer.external.service.other; package org.dspace.importer.external.service.components;
/** /**
* Created by Roeland Dillen (roeland at atmire dot com) * @author Roeland Dillen (roeland at atmire dot com)
* Date: 26/09/12
* Time: 11:09
*/ */
public interface Destroyable { public interface Destroyable {
/**
* Destroy the object
* @throws Exception
*/
public void destroy() throws Exception; public void destroy() throws Exception;
} }

View File

@@ -0,0 +1,92 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.service.components;
import org.dspace.content.Item;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.datamodel.ImportRecord;
import java.util.Collection;
/** Common interface for all import implementations.
* @author Roeland Dillen (roeland at atmire dot com)
*/
public interface MetadataSource {
/**
* Gets the number of records matching a query
* @param query the query in string format
* @return the number of records matching the query
* @throws MetadataSourceException
*/
public int getNbRecords(String query) throws MetadataSourceException;
/**
* Gets the number of records matching a query
* @param query the query object
* @return the number of records matching the query
* @throws MetadataSourceException
*/
public int getNbRecords(Query query) throws MetadataSourceException;
/**
* Gets a set of records matching a query. Supports pagination
* @param query the query. The query will generally be posted 'as is' to the source
* @param start offset
* @param count page size
* @return a collection of fully transformed records
* @throws MetadataSourceException
*/
public Collection<ImportRecord> getRecords(String query, int start, int count)throws MetadataSourceException;
/** Find records based on an object query.
*
* @param query a query object to base the search on.
* @return a set of records. Fully transformed.
* @throws MetadataSourceException
*/
public Collection<ImportRecord> getRecords(Query query)throws MetadataSourceException;
/** Get a single record from the source.
* The first match will be returned
* @param id identifier for the record
* @return a matching record
* @throws MetadataSourceException
*/
public ImportRecord getRecord(String id)throws MetadataSourceException;
/** Get a single record from the source.
* The first match will be returned
* @param query a query matching a single record
* @return a matching record
* @throws MetadataSourceException
*/
public ImportRecord getRecord(Query query)throws MetadataSourceException;
/**
* The string that identifies this import implementation. Preferably a URI
* @return the identifying uri
*/
public String getImportSource();
/** Finds records based on an item.
* @param item an item to base the search on
* @return a collection of import records. Only the identifier of the found records may be put in the record.
* @throws MetadataSourceException if the underlying imports throw any exception.
*/
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException;
/** Finds records based on a query object.
* @param query a query object to base the search on.
* @return a collection of import records. Only the identifier of the found records may be put in the record.
* @throws MetadataSourceException
*/
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException;
}
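To make the contract concrete, a no-op skeleton of an implementation might look as follows. The class name and the returned values are placeholders; a real source would translate each query into requests against an external service and build ImportRecord objects from the responses.

import java.util.Collection;
import java.util.Collections;

import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.service.components.MetadataSource;

public class ExampleMetadataSource implements MetadataSource {

    // The identifier callers pass to ImportService; preferably a URI (placeholder value).
    @Override
    public String getImportSource() { return "https://example.org/import-source"; }

    @Override
    public int getNbRecords(String query) throws MetadataSourceException { return 0; }

    @Override
    public int getNbRecords(Query query) throws MetadataSourceException { return 0; }

    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count)
            throws MetadataSourceException { return Collections.emptyList(); }

    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return Collections.emptyList();
    }

    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException { return null; }

    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException { return null; }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        return Collections.emptyList();
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        return Collections.emptyList();
    }
}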

View File

@@ -0,0 +1,12 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Service components that are aggregated/used in the {@link org.dspace.importer.external.service.AbstractImportMetadataSourceService} and {@link org.dspace.importer.external.service.ImportService}
* @author Roeland Dillen (roeland at atmire dot com)
*/
package org.dspace.importer.external.service.components;

View File

@@ -1,35 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.service.other;
import org.dspace.content.Item;
import org.dspace.importer.external.MetadataSourceException;
import org.dspace.importer.external.Query;
import org.dspace.importer.external.datamodel.ImportRecord;
import java.util.Collection;
/**
* Created by Roeland Dillen (roeland at atmire dot com)
* Date: 17/09/12
* Time: 14:08
*/
public interface Imports {
public int getNbRecords(String query) throws MetadataSourceException;
public int getNbRecords(Query query) throws MetadataSourceException;
public Collection<ImportRecord> getRecords(String query, int start, int count)throws MetadataSourceException;
public Collection<ImportRecord> getRecords(Query q)throws MetadataSourceException;
public ImportRecord getRecord(String id)throws MetadataSourceException;
public ImportRecord getRecord(Query q)throws MetadataSourceException;
public String getImportSource();
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException;
public Collection<ImportRecord> findMatchingRecords(Query q) throws MetadataSourceException;
}

View File

@@ -0,0 +1,13 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* Main import framework services.
* @author Roeland Dillen (roeland at atmire dot com)
*/
package org.dspace.importer.external.service;

View File

@@ -118,7 +118,7 @@ public class RDFizer {
/** /**
* Returns whether all converted data is printed to stdout. Turtle will be * Returns whether all converted data is printed to stdout. Turtle will be
* used as serialization. * used as serialization.
* @return * @return {@code true} if all converted data is to be printed to stdout
*/ */
public boolean isStdout() { public boolean isStdout() {
return stdout; return stdout;
@@ -137,7 +137,7 @@ public class RDFizer {
/** /**
* Returns whether verbose information is printed to System.err. Probably * Returns whether verbose information is printed to System.err. Probably
* this is helpful for CLI only. * this is helpful for CLI only.
* @return * @return {@code true} if verbose mode is on
*/ */
public boolean isVerbose() { public boolean isVerbose() {
return verbose; return verbose;
@@ -154,7 +154,7 @@ public class RDFizer {
/** /**
* Returns whether this is a dry run. Probably this is helpful for CLI only. * Returns whether this is a dry run. Probably this is helpful for CLI only.
* @return * @return {@code true} if dry-run mode is on
*/ */
public boolean isDryrun() { public boolean isDryrun() {
return dryrun; return dryrun;
@@ -214,7 +214,7 @@ public class RDFizer {
System.err.println("Cannot determine RDF URI for " System.err.println("Cannot determine RDF URI for "
+ contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID() + "(handle " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID() + "(handle "
+ dso.getHandle() + ")" + ", skipping. Please " + dso.getHandle() + ")" + ", skipping. Please "
+ "delete it specifing the RDF URI."); + "delete it specifying the RDF URI.");
log.error("Cannot detgermine RDF URI for " log.error("Cannot detgermine RDF URI for "
+ contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID() + "(handle " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID() + "(handle "
+ dso.getHandle() + ")" + ", skipping deletion."); + dso.getHandle() + ")" + ", skipping deletion.");
@@ -417,7 +417,7 @@ public class RDFizer {
} }
} }
// Currently Bundles and Bitsreams aren't supported as independent entities. // Currently Bundles and Bitstreams aren't supported as independent entities.
// They should be converted as part of an item. So we do not need to make // They should be converted as part of an item. So we do not need to make
// the recursive call for them. An item itself will be converted as part // the recursive call for them. An item itself will be converted as part
// of the callback call below. // of the callback call below.

View File

@@ -16,7 +16,7 @@ import com.hp.hpl.jena.rdf.model.Resource;
/** /**
* Schema for DSpace Metadata RDF Mappings. * Schema for DSpace Metadata RDF Mappings.
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de) * @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
* @see http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0 * @see <a href="http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0">http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0</a>
*/ */
public class DMRM { public class DMRM {
/** <p>The RDF model that holds the vocabulary terms</p> */ /** <p>The RDF model that holds the vocabulary terms</p> */

View File

@@ -377,7 +377,7 @@ implements ConverterPlugin
} }
// add all parents // add all parents
for (DSpaceObject parent : collection.getCommunities()) for (DSpaceObject parent : communityService.getAllParents(context, collection))
{ {
if (!RDFUtil.isPublicBoolean(context, parent)) if (!RDFUtil.isPublicBoolean(context, parent))
{ {

View File

@@ -170,7 +170,6 @@ public class Negotiator {
* equals! Caution should be exercised when using it to order a sorted set * equals! Caution should be exercised when using it to order a sorted set
* or a sorted map. Take a look at the java.util.Comparator for further * or a sorted map. Take a look at the java.util.Comparator for further
* information.</p> * information.</p>
* @param mediaRangeRegex
* @return A comparator that imposes orderings that are inconsistent with equals! * @return A comparator that imposes orderings that are inconsistent with equals!
*/ */
public static Comparator<MediaRange> getMediaRangeComparator() { public static Comparator<MediaRange> getMediaRangeComparator() {

View File

@@ -18,21 +18,21 @@ import java.util.List;
public interface RDFStorage { public interface RDFStorage {
/** /**
* Don't use this method directly, use * Don't use this method directly, use
* {@link org.dspace.rdf.RDFizer#convert(org.dspace.core.Context, * {@link org.dspace.rdf.RDFUtil#convert(org.dspace.core.Context, org.dspace.content.DSpaceObject) RDFizer.convert(...)}
* org.dspace.content.DSpaceObject) RDFizer.convert(...)} to convert and store DSpaceObjets. * to convert and store DSpaceObjets.
* @param uri Identifier for this DSO * @param uri Identifier for this DSO
* ({@link org.dspace.rdf.RDFizer#generateIdentifier(org.dspace.core.Context, * ({@link org.dspace.rdf.RDFUtil#generateIdentifier(org.dspace.core.Context, org.dspace.content.DSpaceObject) RDFizer.generateIdentifier(...)}).
* org.dspace.content.DSpaceObject) RDFizer.generateIdentifier(...)}). You can load this model by using this URI. * You can load this model by using this URI.
* @param model The model to store. * @param model The model to store.
* @see org.dspace.rdf.RDFizer; * @see org.dspace.rdf.RDFizer#RDFizer
*/ */
public void store(String uri, Model model); public void store(String uri, Model model);
/** /**
* Don't use this method directly, use * Don't use this method directly, use
* {@link org.dspace.rdf.RDFizer#loadModel(String) RDFizer.loadModel(...)} instead. * {@link org.dspace.rdf.RDFUtil#loadModel(String) RDFizer.loadModel(...)} instead.
* @param uri * @param uri
* @return * @return the model
*/ */
public Model load(String uri); public Model load(String uri);

View File

@@ -23,12 +23,12 @@ import org.dspace.core.Context;
* type SITE, COMMUNITY, COLLECTION or ITEM only. Currently dspace-rdf * type SITE, COMMUNITY, COLLECTION or ITEM only. Currently dspace-rdf
* doesn't support Bundles or Bitstreams as independent entity. * doesn't support Bundles or Bitstreams as independent entity.
* *
* @class{org.dspace.rdf.RDFizer} uses a URIGenerator to generate URIs to * {@link org.dspace.rdf.RDFizer#RDFizer} uses a URIGenerator to generate URIs to
* Identify DSpaceObjects in RDF. You can configure which URIGenerator should be * Identify DSpaceObjects in RDF. You can configure which URIGenerator should be
* used. See DSpace documentation on how to configure RDFizer. * used. See DSpace documentation on how to configure RDFizer.
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de) * @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
* @see org.dspace.rdf.RDFizer * @see org.dspace.rdf.RDFizer#RDFizer
* @see org.dspace.rdf.RDFUtil * @see org.dspace.rdf.RDFUtil#RDFUtil
*/ */
public interface URIGenerator { public interface URIGenerator {
@@ -39,7 +39,10 @@ public interface URIGenerator {
* doesn't support Bundles or Bitstreams as independent entity. This method * doesn't support Bundles or Bitstreams as independent entity. This method
* should work even if the DSpaceObject does not exist anymore. * should work even if the DSpaceObject does not exist anymore.
* @param context * @param context
* @param dso * @param type
* @param id
* @param handle
* @param identifiers
* @return May return null, if no URI could be generated. * @return May return null, if no URI could be generated.
* @see org.dspace.rdf.RDFUtil#generateIdentifier(Context, DSpaceObject) * @see org.dspace.rdf.RDFUtil#generateIdentifier(Context, DSpaceObject)
*/ */
@@ -47,8 +50,7 @@ public interface URIGenerator {
throws SQLException; throws SQLException;
/** /**
* Shortcut for {@code generateIdentifier(context, dso.getType(), * Shortcut for {@code generateIdentifier(context, dso.getType(), dso.getID(), dso.getHandle())}.
* dso.getID(), dso.getHandle())}.
* *
* @param context * @param context
* @param dso * @param dso

View File

@@ -16,6 +16,10 @@ import org.dspace.usage.AbstractUsageEventListener;
import org.dspace.usage.UsageEvent; import org.dspace.usage.UsageEvent;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/*
* @deprecated As of DSpace 6.0, ElasticSearch statistics are replaced by Solr statistics
* @see org.dspace.statistics.service.SolrLoggerUsageEventListener#SolrLoggerUsageEventListener
*/
public class ElasticSearchLoggerEventListener extends AbstractUsageEventListener { public class ElasticSearchLoggerEventListener extends AbstractUsageEventListener {
private static Logger log = Logger.getLogger(ElasticSearchLoggerEventListener.class); private static Logger log = Logger.getLogger(ElasticSearchLoggerEventListener.class);

View File

@@ -51,6 +51,10 @@ import java.net.URL;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.*; import java.util.*;
/*
* @deprecated As of DSpace 6.0, ElasticSearch statistics are replaced by Solr statistics
* @see org.dspace.statistics.SolrLoggerServiceImpl#SolrLoggerServiceImpl
*/
public class ElasticSearchLoggerServiceImpl implements ElasticSearchLoggerService, InitializingBean { public class ElasticSearchLoggerServiceImpl implements ElasticSearchLoggerService, InitializingBean {
private static Logger log = Logger.getLogger(ElasticSearchLoggerServiceImpl.class); private static Logger log = Logger.getLogger(ElasticSearchLoggerServiceImpl.class);

View File

@@ -21,6 +21,9 @@ import java.util.HashMap;
/** /**
* Service interface class for the Elastic Search logging. * Service interface class for the Elastic Search logging.
* The implementation of this class is responsible for all business logic calls for the Elastic Search logging and is autowired by spring * The implementation of this class is responsible for all business logic calls for the Elastic Search logging and is autowired by spring
*
* @deprecated As of DSpace 6.0, ElasticSearch statistics are replaced by Solr statistics
* @see org.dspace.statistics.service.SolrLoggerService#SolrLoggerService
* *
* @author kevinvandevelde at atmire.com * @author kevinvandevelde at atmire.com
*/ */

View File

@@ -90,7 +90,7 @@ public class SpiderDetector {
/** /**
* Get an immutable Set representing all the Spider Addresses here * Get an immutable Set representing all the Spider Addresses here
* *
* @return * @return a set of IP addresses as strings
*/ */
public static Set<String> getSpiderIpAddresses() { public static Set<String> getSpiderIpAddresses() {

View File

@@ -21,20 +21,20 @@ import java.util.Map;
public interface BitStoreService public interface BitStoreService
{ {
/** /**
* Initialize the asset store * Initialize the asset store
* *
*/ */
public void init() throws IOException; public void init() throws IOException;
/** /**
* Return an identifier unique to this asset store instance * Return an identifier unique to this asset store instance
* *
* @return a unique ID * @return a unique ID
*/ */
public String generateId(); public String generateId();
/** /**
* Retrieve the bits for bitstream * Retrieve the bits for bitstream
* *
* @param bitstream * @param bitstream
@@ -56,13 +56,11 @@ public interface BitStoreService
* </p> * </p>
* *
* @param bitstream * @param bitstream
* The bitsream object * The bitstream object
* @param inputStream * @param inputStream
* The stream of bits * The stream of bits
* @exception java.io.IOException * @exception java.io.IOException
* If a problem occurs while storing the bits * If a problem occurs while storing the bits
*
* @return Map containing technical metadata (size, checksum, etc)
*/ */
public void put(Bitstream bitstream, InputStream inputStream) throws IOException; public void put(Bitstream bitstream, InputStream inputStream) throws IOException;
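A small sketch of how a caller might push content into an asset store through this interface. The wrapper class and file handling are assumptions, and the import of BitStoreService is omitted because its package declaration is not shown in this hunk; only init() and put(Bitstream, InputStream) from the diff above are relied on.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.dspace.content.Bitstream;

public class BitStoreUsageSketch {

    // Stores the contents of a local file under the given bitstream record.
    public void storeFile(BitStoreService store, Bitstream bitstream, String path) throws IOException {
        store.init(); // normally done once, when the store instance is created (assumption)
        try (InputStream in = new FileInputStream(path)) {
            store.put(bitstream, in);
        }
    }
}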

Some files were not shown because too many files have changed in this diff.