Merge pull request #568 from pnbecker/dspace-rdf

DS-2061: Linked (Open) Data support for DSpace
Peter Dietz
2014-10-15 12:38:02 -04:00
54 changed files with 6671 additions and 7 deletions

View File

@@ -300,6 +300,11 @@
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
</dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class ItemNotArchivedException extends Exception {
public ItemNotArchivedException()
{
super("The processed item is not part of the main archive.");
}
}

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class ItemNotDiscoverableException extends Exception {
public ItemNotDiscoverableException()
{
super("The processed item is not discoverable.");
}
}

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class ItemWithdrawnException extends Exception {
public ItemWithdrawnException()
{
super("The processed item is withdrawn.");
}
}

View File

@@ -0,0 +1,299 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.core.Constants;
import org.dspace.rdf.conversion.RDFConverter;
import org.dspace.rdf.storage.RDFStorage;
import org.dspace.rdf.storage.URIGenerator;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class RDFConfiguration {
private static final Logger log = Logger.getLogger(RDFConfiguration.class);
/**
* Property key to load the public address of the SPARQL endpoint.
*/
public static final String SPARQL_ENDPOINT_KEY = "rdf.public.sparql.endpoint";
/**
* Property key to load the class to use as URIGenerator.
*/
public static final String URIGENERATOR_KEY = "rdf.URIGenerator";
/**
* Property key to load the class to use as RDFConverter.
*/
public static final String RDFCONVERTER_KEY = "rdf.converter";
/**
* Property key to load the list of plugins for the RDFConverter.
*/
public static final String CONVERTER_PLUGINS_KEY = "rdf.converter.plugins";
/**
* Key of the Property to load the types of DSpaceObjects that should get
* converted.
*/
public static final String CONVERTER_DSOTYPES_KEY = "rdf.converter.DSOtypes";
/**
* Property key to load the class to use as RDFStorage.
*/
public static final String RDFSTORAGE_KEY = "rdf.storage";
/**
* Property key to load the address of the SPARQL 1.1 GRAPH STORE HTTP
* PROTOCOL endpoint.
*/
public static final String STORAGE_GRAPHSTORE_ENDPOINT_KEY =
"rdf.storage.graphstore.endpoint";
/**
* Property key to load whether HTTP authentication for the
* graph store endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_AUTHENTICATION_KEY =
"rdf.storage.graphstore.authentication";
/**
* Property key to load the username if authentication for the graph store
* endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_LOGIN_KEY = "rdf.storage.graphstore.login";
/**
* Property key to load the password if authentication for the graph store
* endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_PASSWORD_KEY = "rdf.storage.graphstore.password";
/**
* Property key to load the address of the SPARQL endpoint to use within
* DSpace. If the property is empty or does not exist, the public SPARQL
* endpoint will be used.
*/
public static final String STORAGE_SPARQL_ENDPOINT_KEY = "rdf.storage.sparql.endpoint";
/**
* Property key to load whether HTTP authentication for the internal SPARQL
* endpoint is required.
*/
public static final String STORAGE_SPARQL_AUTHENTICATION_KEY = "rdf.storage.sparql.authentication";
/**
* Property key to load the username if authentication for the internal
* SPARQL endpoint is required.
*/
public static final String STORAGE_SPARQL_LOGIN_KEY = "rdf.storage.sparql.login";
/**
* Property key to load the password if authentication for the internal
* SPARQL endpoint is required.
*/
public static final String STORAGE_SPARQL_PASSWORD_KEY = "rdf.storage.sparql.password";
/**
* Property key to load the URL of the dspace-rdf module. This is necessary
* to create links from the JSPUI or XMLUI to the RDF representation of
* DSpaceObjects.
*/
public static final String CONTEXT_PATH_KEY = "rdf.contextPath";
public static final String CONTENT_NEGOTIATION_KEY = "rdf.contentNegotiation.enable";
private static URIGenerator generator;
private static RDFStorage storage;
private static RDFConverter converter;
public static String[] getConverterPlugins()
{
return RDFConfiguration.loadConfigurationArray(CONVERTER_PLUGINS_KEY);
}
public static String[] getDSOTypesToConvert()
{
String dsoTypes = (new DSpace()).getConfigurationService().getProperty(
CONVERTER_DSOTYPES_KEY);
if (StringUtils.isEmpty(dsoTypes))
{
log.warn("Property rdf." + CONVERTER_DSOTYPES_KEY + " was not found "
+ "or is empty. Will convert all type of DSpace Objects.");
return Constants.typeText;
}
return dsoTypes.split(",\\s*");
}
public static boolean isConvertType(int type)
{
for (String typeName : getDSOTypesToConvert())
{
if (Constants.getTypeID(typeName) == type) return true;
}
return false;
}
public static boolean isConvertType(String type)
{
for (String typeName : getDSOTypesToConvert())
{
if (typeName.equalsIgnoreCase(type)) return true;
}
return false;
}
public static boolean isContentNegotiationEnabled()
{
ConfigurationService configurationService =
new DSpace().getConfigurationService();
return configurationService.getPropertyAsType(CONTENT_NEGOTIATION_KEY,
false);
}
public static String getPublicSparqlEndpointAddress()
{
ConfigurationService configurationService =
new DSpace().getConfigurationService();
return configurationService.getProperty(SPARQL_ENDPOINT_KEY);
}
public static String getInternalSparqlEndpointAddress()
{
ConfigurationService configurationService =
new DSpace().getConfigurationService();
String internalSparqlEndpoint =
configurationService.getProperty(STORAGE_SPARQL_ENDPOINT_KEY);
String externalSparqlEndpoint =
configurationService.getProperty(SPARQL_ENDPOINT_KEY);
return StringUtils.isEmpty(internalSparqlEndpoint) ?
externalSparqlEndpoint : internalSparqlEndpoint;
}
public static String getDSpaceRDFModuleURI()
{
ConfigurationService configurationService =
new DSpace().getConfigurationService();
return configurationService.getProperty(CONTEXT_PATH_KEY);
}
protected static RDFConverter getRDFConverter()
{
if (converter == null)
{
ConfigurationService configurationService =
new DSpace().getConfigurationService();
converter = (RDFConverter) initializeClass(configurationService,
RDFCONVERTER_KEY, "RDFConverter");
}
return converter;
}
/*
 * Initialize the URIGenerator configured in the DSpace configuration (see
 * {@link #URIGENERATOR_KEY URIGENERATOR_KEY}).
 * The URIGenerator should be configurable using the DSpace configuration
 * rather than Spring, to avoid XML configuration. This method loads and
 * initializes the configured URIGenerator. It is static so that the
 * RDFizer does not have to be instantiated to generate the identifier
 * for a DSO.
 */
protected static URIGenerator getURIGenerator()
{
if (generator == null)
{
ConfigurationService configurationService =
new DSpace().getConfigurationService();
generator = (URIGenerator) initializeClass(configurationService,
URIGENERATOR_KEY, "URIGenerator");
}
return generator;
}
/*
 * Initialize the RDFStorage configured in the DSpace configuration (see
 * {@link #RDFSTORAGE_KEY RDFSTORAGE_KEY}).
 * The storage class should be configurable using the DSpace configuration
 * rather than Spring, to avoid XML configuration. This method loads and
 * initializes the configured RDFStorage class. It is static so that the
 * RDFizer does not have to be instantiated to load RDF data.
 */
protected static RDFStorage getRDFStorage()
{
if (storage == null)
{
ConfigurationService configurationService =
new DSpace().getConfigurationService();
storage = (RDFStorage) initializeClass(configurationService,
RDFSTORAGE_KEY, "RDFStorage");
}
return storage;
}
/**
 * Load a comma-separated value from the configuration and split it into a
 * string array.
 * @param key Key of the configuration value.
 * @return Null if the configuration value was not found or empty,
 * otherwise a String array representing the configuration value
 * split on commas.
 */
public static String[] loadConfigurationArray(String key)
{
String value = (new DSpace()).getConfigurationService().getProperty(key);
if (StringUtils.isEmpty(value))
{
return null;
}
return value.split(",\\s*");
}
/*
 * This method must be static, so we can use it from
 * RDFizer.generateIdentifier and RDFizer.generateGraphName. Because this
 * method is static we cannot use the configurationService initialized in
 * the class constructor.
 * This method reads from the DSpace configuration which class to use and
 * initializes it.
 */
private static Object initializeClass(ConfigurationService configurationService,
String propertyName,
String objectName)
{
String className = configurationService.getProperty(propertyName);
if (StringUtils.isEmpty(className))
{
log.error("Cannot load " + objectName + "! Property " + propertyName
+ " not found or empty!");
throw new RuntimeException("Cannot load " + objectName
+ ", property not found or not configured!");
}
Object instantiatedObject = null;
try
{
Class objectClass = Class.forName(className);
instantiatedObject = objectClass.newInstance();
} catch (ClassNotFoundException ex) {
log.error("Cannot find class '" + className + "' for " + objectName
+ ". " + "Please check your configuration.", ex);
throw new RuntimeException("Cannot find class for " + objectName
+ " (" + className + ").", ex);
} catch (InstantiationException ex) {
log.error("Cannot instantiate " + objectName + " (class "
+ className + ").", ex);
throw new RuntimeException("Cannot instantiate " + objectName
+ " (class " + className + ").", ex);
} catch (IllegalAccessException ex) {
log.error("IllegalAccessException thrown while instantiating the "
+ objectName + " (class " + className + ").", ex);
throw new RuntimeException("IllegalAccessException thrown while "
+ "instantiating the " + objectName + " (class "
+ className + ").", ex);
}
return instantiatedObject;
}
}
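
For orientation, a dspace.cfg fragment wiring the keys above together might look like the following sketch. The property keys are the constants defined in this class; the class names, endpoint URLs and values are illustrative placeholders, not shipped defaults:

    rdf.contentNegotiation.enable = true
    rdf.contextPath = ${dspace.baseUrl}/rdf
    rdf.public.sparql.endpoint = http://sparql.example.org/sparql
    rdf.converter = org.dspace.rdf.conversion.RDFConverterImpl
    rdf.converter.plugins = org.dspace.rdf.conversion.StaticDSOConverterPlugin
    rdf.converter.DSOtypes = SITE, COMMUNITY, COLLECTION, ITEM
    rdf.URIGenerator = org.dspace.rdf.storage.LocalURIGenerator
    rdf.storage = org.dspace.rdf.storage.RDFStorageImpl
    rdf.storage.graphstore.endpoint = http://localhost:3030/dspace/data
    rdf.storage.graphstore.authentication = no
    rdf.storage.sparql.endpoint = http://localhost:3030/dspace/sparql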

View File

@@ -0,0 +1,470 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
import com.hp.hpl.jena.rdf.model.Model;
import java.sql.SQLException;
import java.util.Deque;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class RDFConsumer implements Consumer
{
private static final Logger log = Logger.getLogger(RDFConsumer.class);
protected Deque<DSOIdentifier> toConvert;
protected Deque<DSOIdentifier> toDelete;
@Override
public void consume(Context ctx, Event event)
throws SQLException
{
if (this.toConvert == null)
{
log.debug("Initalized first queue.");
this.toConvert = new LinkedList<>();
}
if (this.toDelete == null)
{
log.debug("Initalized second queue.");
this.toDelete = new LinkedList<>();
}
int sType = event.getSubjectType();
switch (sType)
{
case (Constants.BITSTREAM) :
{
this.consumeBitstream(ctx, event);
return;
}
case (Constants.BUNDLE) :
{
this.consumeBundles(ctx, event);
return;
}
case (Constants.ITEM) :
{
this.consumeCommunityCollectionItem(ctx, event);
return;
}
case (Constants.COLLECTION) :
{
this.consumeCommunityCollectionItem(ctx, event);
return;
}
case (Constants.COMMUNITY) :
{
this.consumeCommunityCollectionItem(ctx, event);
return;
}
case (Constants.SITE) :
{
this.consumeSite(ctx,event);
return;
}
default:
{
log.warn("RDFConsumer should not have been given this kind of "
+ "subject in an event, skipping: " + event.toString());
}
}
}
public void consumeBitstream(Context ctx, Event event) throws SQLException
{
if (event.getEventType() == Event.MODIFY
|| event.getEventType() == Event.MODIFY_METADATA)
{
Bitstream bitstream = Bitstream.find(ctx, event.getSubjectID());
if (bitstream == null)
{
log.warn("Cannot find bitstream " + event.getSubjectID() + "! "
+ "Ignoring, as it is likely it was deleted "
+ "and we'll cover it by a REMOVE event on its bundle.");
return;
}
Bundle[] bundles = bitstream.getBundles();
for (Bundle b : bundles)
{
Item[] items = b.getItems();
for (Item i : items)
{
DSOIdentifier id = new DSOIdentifier(i, ctx);
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
{
this.toConvert.addLast(id);
}
}
}
return;
}
// ignore create and delete event on Bitstreams, as they should be
// reported as ADD and REMOVE on their bundles as well.
if (event.getEventType() == Event.CREATE
|| event.getEventType() == Event.DELETE)
{
return;
}
// Events of type ADD and REMOVE do not currently (DSpace 4.1) exist
// for a bitstream.
log.warn("Got an unexpected event type (" + event.getEventTypeAsString()
+ ") for a bitstream. Ignoring.");
}
public void consumeBundles(Context ctx, Event event) throws SQLException
{
if (event.getEventType() == Event.ADD
|| event.getEventType() == Event.REMOVE
|| event.getEventType() == Event.MODIFY
|| event.getEventType() == Event.MODIFY_METADATA)
{
// either a Bitstream was added or removed or the Bundle was changed
// update its item.
Bundle bundle = Bundle.find(ctx, event.getSubjectID());
if (bundle == null)
{
log.warn("Cannot find bundle " + event.getSubjectID() + "! "
+ "Ignoring, as it is likely it was deleted "
+ "and we'll cover it by a REMOVE event on its item.");
return;
}
Item[] items = bundle.getItems();
for (Item i : items)
{
DSOIdentifier id = new DSOIdentifier(i, ctx);
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
{
this.toConvert.addLast(id);
}
}
return;
}
// ignore create and delete event on Bundles, as they should be
// reported as ADD and REMOVE on their items as well.
if (event.getEventType() == Event.CREATE
|| event.getEventType() == Event.DELETE)
{
return;
}
log.warn("Got an unexpected event type (" + event.getEventTypeAsString()
+ ") for a bundle. Ignoring.");
}
public void consumeCommunityCollectionItem(Context ctx, Event event) throws SQLException
{
if (event.getSubjectType() != Constants.COMMUNITY
&& event.getSubjectType() != Constants.COLLECTION
&& event.getSubjectType() != Constants.ITEM)
{
log.error("Called on an unexpected Event with subject type "
+ event.getSubjectTypeAsString() + " and event type "
+ event.getEventTypeAsString() + ", ignoring.");
return;
}
if (event.getEventType() == Event.DELETE)
{
DSOIdentifier id = new DSOIdentifier(event.getSubjectType(),
event.getSubjectID(), event.getDetail(), event.getIdentifiers());
if (this.toConvert.contains(id))
{
this.toConvert.remove(id);
}
if (!this.toDelete.contains(id))
{
this.toDelete.addLast(id);
}
return;
}
if (event.getEventType() == Event.MODIFY
|| event.getEventType() == Event.MODIFY_METADATA
|| event.getEventType() == Event.ADD
|| event.getEventType() == Event.REMOVE
|| event.getEventType() == Event.CREATE)
{
// we have to load the DSO, as the handle is set as the event detail
// only if the event type is DELETE.
DSpaceObject dso = event.getSubject(ctx);
if (dso == null)
{
log.warn("Cannot find " + event.getSubjectTypeAsString() + " "
+ event.getSubjectID() + "! " + "Ignoring, as it is "
+ "likely it was deleted and we'll cover it by another "
+ "event with the type REMOVE.");
return;
}
DSOIdentifier id = new DSOIdentifier(dso, ctx);
// If an item gets withdrawn, a MODIFY event is fired. We have to
// delete the item from the triple store instead of converting it.
// We don't need special handling for reinstated items, as they can
// be processed as normal modify events.
if (dso instanceof Item
&& event.getDetail() != null
&& event.getDetail().equals("WITHDRAW"))
{
if (this.toConvert.contains(id))
{
this.toConvert.remove(id);
}
if (!this.toDelete.contains(id))
{
this.toDelete.add(id);
}
return;
}
if (!this.toDelete.contains(id)
&& !this.toConvert.contains(id))
{
this.toConvert.addLast(id);
}
}
}
public void consumeSite(Context ctx, Event event)
{
// in case a top-level community was added or removed.
// The event type REMOVE won't be thrown until DS-1966 is fixed (e.g. by
// merging PR #517).
if (event.getEventType() == Event.ADD
|| event.getEventType() == Event.REMOVE)
{
DSOIdentifier id = new DSOIdentifier(Constants.SITE,
Site.SITE_ID, Site.getSiteHandle(), new String[] {Site.getSiteHandle()});
if (!this.toConvert.contains(id)) this.toConvert.add(id);
return;
}
log.warn("Got an unexpected Event for the SITE. Event type is "
+ event.getEventTypeAsString() + ", ignoring.");
}
@Override
public void end(Context ctx) throws Exception {
log.debug("Started processing of queued events.");
// create a new context to be sure to work as an anonymous user.
// We don't want to store private data in a triplestore with a public
// SPARQL endpoint.
ctx = new Context(Context.READ_ONLY);
if (toDelete == null)
{
log.debug("Deletion queue does not exists, creating empty queue.");
this.toDelete = new LinkedList<>();
}
if (toConvert != null)
{
log.debug("Starting conversion of DSpaceObjects.");
while (true)
{
DSOIdentifier id;
try { id = toConvert.removeFirst(); }
catch (NoSuchElementException ex) { break; }
if (toDelete.contains(id))
{
log.debug("Skipping " + Constants.typeText[id.type] + " "
+ Integer.toString(id.id) + " as it is marked for "
+ "deletion as well.");
continue;
}
log.debug("Converting " + Constants.typeText[id.type] + " "
+ Integer.toString(id.id) + ".");
convert(ctx, id);
}
log.debug("Conversion ended.");
}
log.debug("Starting to delete data from the triple store...");
while (true)
{
DSOIdentifier id;
try { id = toDelete.removeFirst(); }
catch (NoSuchElementException ex) { break; }
log.debug("Going to delete data from " +
Constants.typeText[id.type] + " "
+ Integer.toString(id.id) + ".");
delete(ctx, id);
}
ctx.abort();
log.debug("Deletion finished.");
}
void convert(Context ctx, DSOIdentifier id) throws SQLException
{
Model m = null;
try
{
if (id.type == Constants.SITE)
{
m = RDFUtil.convertAndStore(ctx, Site.find(ctx, 0));
return;
}
DSpaceObject dso = DSpaceObject.find(ctx, id.type, id.id);
if (dso == null)
{
log.error("Cannot find " + Constants.typeText[id.type]
+ " " + id.id + " unexpectedly! Will delete all "
+ "information about it in the triple store.");
toDelete.add(id);
return;
}
m = RDFUtil.convertAndStore(ctx, dso);
}
catch(AuthorizeException ex)
{
log.debug(Constants.typeText[id.type] + " " +
Integer.toString(id.id) + " couldn't be converted: "
+ "anonymous user doesn't have read permsission. "
+ ex.getMessage());
toDelete.add(id);
}
catch (IllegalArgumentException ex)
{
log.error("Ignoring an unexpected IllegalArgumentException: "
+ ex.getMessage(), ex);
}
catch (ItemNotArchivedException ex)
{
log.info("Anonymous user cannot read "
+ Constants.typeText[id.type] + " "
+ Integer.toString(id.id)
+ ": deleting it from the triplestore.");
toDelete.add(id);
}
catch (ItemNotDiscoverableException ex)
{
log.info("Item " + Integer.toString(id.id) + " is not "
+ "discoverable: deleting it from the triplestore.");
toDelete.add(id);
}
catch (ItemWithdrawnException ex)
{
log.info("Item " + Integer.toString(id.id) + " is withdrawn: "
+ "deleting it from the triplestore.");
toDelete.add(id);
}
catch (RDFMissingIdentifierException ex)
{
log.warn("Cannot convert " + Constants.typeText[id.type]
+ " " + Integer.toString(id.id) + ", as no RDF "
+ "identifier could be generated: "
+ ex.getMessage(), ex);
}
finally
{
if (m != null)
{
m.close();
}
}
}
void delete(Context context, DSOIdentifier id)
throws SQLException {
try
{
RDFUtil.delete(context, id.type, id.id, id.handle, id.identifiers);
}
catch (RDFMissingIdentifierException ex)
{
log.warn("Cannot delete " + Constants.typeText[id.type] + " "
+ Integer.toString(id.id) + ": "
+ ex.getMessage(), ex);
}
}
@Override
public void finish(Context ctx) throws Exception {
}
@Override
public void initialize() throws Exception {
}
class DSOIdentifier
{
int type;
int id;
String handle;
String[] identifiers;
DSOIdentifier(int type, int id, String handle, String[] identifiers)
{
this.type = type;
this.id = id;
this.handle = handle;
this.identifiers = identifiers;
}
DSOIdentifier(DSpaceObject dso, Context ctx)
{
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
throw new IllegalArgumentException("Provided DSpaceObject does"
+ " not have a handle!");
}
this.type = dso.getType();
this.id = dso.getID();
this.handle = dso.getHandle();
this.identifiers = dso.getIdentifiers(ctx);
}
@Override
public boolean equals(Object o)
{
if (!(o instanceof DSOIdentifier)) return false;
DSOIdentifier dsoId = (DSOIdentifier) o;
/*
log.warn("Testing if " + Constants.typeText[this.type] + " "
+ Integer.toString(this.id) + " and "
+ Constants.typeText[dsoId.type] + " "
+ Integer.toString(dsoId.id) + " are equal.");
*/
return (this.type == dsoId.type && this.id == dsoId.id);
}
@Override
public int hashCode()
{
/* log.debug("Created hash " + Integer.toString(this.type + (10*this.id)));*/
// As, at least up to DSpace 4.1, the DSpaceObject type is a one-digit
// number, this should produce a distinct hash.
return this.type + (10*this.id);
}
}
}
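
For this consumer to receive events, it has to be registered with DSpace's event dispatcher in dspace.cfg. A sketch following the usual event.consumer.* pattern; the consumer name "rdf" and the exact filter and dispatcher lists are chosen for illustration:

    event.consumer.rdf.class = org.dspace.rdf.RDFConsumer
    event.consumer.rdf.filters = Community|Collection|Item|Bundle|Bitstream|Site+All
    event.dispatcher.default.consumers = versioning, discovery, eperson, rdf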

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
import org.dspace.core.Constants;
/**
* RDFConverter Exception
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
class RDFMissingIdentifierException extends Exception {
public RDFMissingIdentifierException()
{
super("Coudln't generate a necessary RDF Identifier.");
}
RDFMissingIdentifierException(int type, int id) {
super("Couldn't generate a necessary RDF Identifier for "
+ Constants.typeText[type] + " " + Integer.toString(id) + ".");
}
}

View File

@@ -0,0 +1,298 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
import com.hp.hpl.jena.rdf.model.Model;
import java.sql.SQLException;
import java.util.logging.Level;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.core.Constants;
import org.dspace.core.Context;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class RDFUtil {
private static final Logger log = Logger.getLogger(RDFUtil.class);
/**
* Loads converted data of a DSpaceObject identified by the URI provided
* as {@code identifier}. This method uses the RDFStorage configured in
* the DSpace configuration. Close the model
* ({@link com.hp.hpl.jena.rdf.model.Model#close() Model.close()}) as soon
* as possible to free system resources.
* @param identifier A URI representing the object you want to load data about.
* @return A model containing the RDF data to the specified identifier or
* null if no data could be found.
*/
public static Model loadModel(String identifier)
{
return RDFConfiguration.getRDFStorage().load(identifier);
}
/**
* Generates a URI identifying the provided DSpaceObject. This method
* automatically loads and instantiates the URIGenerator configured in
* DSpace configuration.
* Please note that URIs can be generated only for DSpaceObjects of the
* types SITE, COMMUNITY, COLLECTION and ITEM. Currently dspace-rdf
* doesn't support Bundles or Bitstreams as independent entities.
* @param context DSpace Context.
* @param dso DSpace Object you want to get an identifier for.
* @return URI to identify the DSO or null if no URI could be generated.
* This can happen e.g. if you use a URIGenerator that relies on
* persistent identifiers like DOIs or Handles, but no such
* identifier is assigned to the provided DSO.
*/
public static String generateIdentifier(Context context, DSpaceObject dso)
throws SQLException
{
return RDFConfiguration.getURIGenerator().generateIdentifier(context, dso);
}
/**
* Generates a URI identifying the provided DSpaceObject. This method
* automatically loads and instantiates the URIGenerator configured in
* DSpace configuration.
* Please note that URIs can be generated only for DSpaceObjects of the
* types SITE, COMMUNITY, COLLECTION and ITEM. Currently dspace-rdf
* doesn't support Bundles or Bitstreams as independent entities.
* @param context DSpace Context.
* @param type Type of the DSpaceObject you want to generate a URI for (e.g.
* {@link org.dspace.core.Constants#ITEM Constants.ITEM}).
* @param id ID of the DSpaceObject you want to generate a URI for.
* @param handle Handle of the DSpaceObject you want to generate a URI for.
* @return URI to identify the DSO or null if no URI could be generated.
* This can happen e.g. if you use a URIGenerator that relies on
* persistent identifiers like DOIs or Handles, but no such
* identifier is assigned to the provided DSO.
*/
public static String generateIdentifier(Context context, int type, int id,
String handle, String[] identifier)
throws SQLException
{
return RDFConfiguration.getURIGenerator().generateIdentifier(context,
type, id, handle, identifier);
}
/**
* Converts the provided DSpaceObject into RDF and returns the model.
* Please note that dspace-rdf doesn't support Bundles or Bitstreams as
* independent entities. You can convert DSpaceObjects of type SITE,
* COMMUNITY, COLLECTION or ITEM.
* @param context Consider that the converted data will be stored in a
* triple store, that is outside the range of the DSpace
* authorization mechanism. Unless you are really sure what
* you are doing, you should provide the context of an
* anonymous user here, as the triple store probably provides
* a public SPARQL endpoint.
* @param dso DSpaceObject to convert.
* @return The converted data or null if the conversion result is empty.
* Remember to close the model as soon as you don't need it anymore.
* @throws RDFMissingIdentifierException If no identifier could be generated.
* @throws java.sql.SQLException
* @throws ItemNotArchivedException If you want to convert an Item that is
* not archived.
* @throws ItemWithdrawnException If you want to convert an Item that is
* withdrawn.
* @throws ItemNotDiscoverableException If you want to convert an Item that
* is not discoverable.
* @throws AuthorizeException If the provided context does not grant READ
* permissions on the DSpaceObject.
* @throws IllegalArgumentException If the DSpaceObject is not of type SITE,
* COMMUNITY, COLLECTION or ITEM.
*/
public static Model convert(Context context, DSpaceObject dso)
throws RDFMissingIdentifierException, SQLException, ItemNotArchivedException,
ItemWithdrawnException, ItemNotDiscoverableException,
AuthorizeException, IllegalArgumentException
{
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
throw new IllegalArgumentException(dso.getTypeText()
+ " is currently not supported as independent entity.");
}
if (!RDFConfiguration.isConvertType(dso.getTypeText()))
{
return null;
}
isPublic(context, dso);
return RDFConfiguration.getRDFConverter().convert(context, dso);
}
/**
* Converts a DSpaceObject into RDF data and stores them using the configured
* {@link org.dspace.rdf.storage.RDFStorage RDFStorage}.
* Please note that dspace-rdf doesn't support Bundles or Bitstreams as
* independent entities. You can convert DSpaceObjects of type SITE,
* COMMUNITY, COLLECTION or ITEM.
* @param context Consider that the converted data will be stored in a
* triple store, that is outside the range of the DSpace
* authorization mechanism. Unless you are really sure what
* you are doing, you should provide the context of an
* anonymous user here, as the triple store probably provides
* a public SPARQL endpoint.
* @param dso DSpaceObject to convert.
* @return The converted data or null if the conversion result is empty.
* Remember to close the model as soon as you don't need it anymore.
* @throws RDFMissingIdentifierException If no identifier could be generated.
* @throws java.sql.SQLException
* @throws ItemNotArchivedException If you want to convert an Item that is
* not archived.
* @throws ItemWithdrawnException If you want to convert an Item that is
* withdrawn.
* @throws ItemNotDiscoverableException If you want to convert an Item that
* is not discoverable.
* @throws AuthorizeException If the provided context does not grant READ
* permissions on the DSpaceObject.
* @throws IllegalArgumentException If the DSpaceObject is not of type SITE,
* COMMUNITY, COLLECTION or ITEM.
*/
public static Model convertAndStore(Context context, DSpaceObject dso)
throws RDFMissingIdentifierException, SQLException, ItemNotArchivedException,
ItemWithdrawnException, ItemNotDiscoverableException,
AuthorizeException, IllegalArgumentException
{
Model convertedData = convert(context, dso);
String identifier = generateIdentifier(context, dso);
if (StringUtils.isEmpty(identifier))
{
log.error("Cannot generate identifier for dso from type "
+ dso.getTypeText() + " (id: " + dso.getID() + ").");
if (convertedData != null) convertedData.close();
throw new RDFMissingIdentifierException(dso.getType(), dso.getID());
}
if (convertedData == null)
{
// if data about this dso is stored in the triplestore already, we
// should remove it, as the conversion currently results in no data.
RDFConfiguration.getRDFStorage().delete(identifier);
return null;
}
RDFConfiguration.getRDFStorage().store(identifier, convertedData);
return convertedData;
}
/**
* Checks whether the provided DSpaceObject is readable within the provided
* context and, if the DSO is an Item, whether it is archived, discoverable
* and not withdrawn.
*
* @param context Consider that the converted data will be stored in a
* triple store, that is outside the range of the DSpace
* authorization mechanism. Unless you are really sure what
* you are doing, you should provide the context of an
* anonymous user here, as the triple store probably provides
* a public SPARQL endpoint.
* @param dso The DSpaceObject to check.
* @throws SQLException
* @throws ItemNotArchivedException If {@code dso} is an Item and is not
* archived.
* @throws ItemWithdrawnException If {@code dso} is an Item and is withdrawn.
* @throws ItemNotDiscoverableException If {@code dso} is an Item and is not
* discoverable.
* @throws AuthorizeException If {@code context} does not grant {@code READ}
* permissions for {@code dso}.
*/
public static void isPublic(Context context, DSpaceObject dso)
throws SQLException, ItemNotArchivedException, ItemWithdrawnException,
ItemNotDiscoverableException, AuthorizeException
{
// as there is no way to set site permissions in XMLUI or JSPUI, we
// ignore the permissions of the repository root (DSpaceObject of type
// Site).
if (dso instanceof Site)
{
return;
}
AuthorizeManager.authorizeAction(context, dso, Constants.READ);
if (dso instanceof Item)
{
Item item = (Item) dso;
if (!item.isArchived()) throw new ItemNotArchivedException();
if (!item.isDiscoverable()) throw new ItemNotDiscoverableException();
if (item.isWithdrawn()) throw new ItemWithdrawnException();
}
}
/**
* Does the same as {@link #isPublic(Context, DSpaceObject)
* isPublic(Context, DSpaceObject)} but returns a boolean instead of throwing
* exceptions. For those who don't want to deal with catching exceptions.
* @param context Consider that the converted data will be stored in a
* triple store, that is outside the range of the DSpace
* authorization mechanism. Unless you are really sure what
* you are doing, you should provide the context of an
* anonymous user here, as the triple store probably provides
* a public SPARQL endpoint.
* @param dso The DSpaceObject to check.
* @return true if {@link #isPublic(Context, DSpaceObject)
* isPublic(Context, DSpaceObject)} doesn't throw an exception, false if it
* did.
* @throws SQLException
*/
public static boolean isPublicBoolean(Context context, DSpaceObject dso)
throws SQLException
{
try {
RDFUtil.isPublic(context, dso);
} catch (ItemNotArchivedException | ItemWithdrawnException
| ItemNotDiscoverableException | AuthorizeException ex) {
return false;
}
return true;
}
/**
* Deletes the data identified by the URI from the triple store.
* @param uri URI to identify the named graph to delete.
*/
public static void delete(String uri)
{
RDFConfiguration.getRDFStorage().delete(uri);
}
/**
* This is a shortcut to generate an RDF identifier for a DSpaceObject and
* to delete the identified data from the named graph.
* @param ctx
* @param type DSpaceObject type (e.g. {@link Constants#ITEM Constants.ITEM}).
* @param id ID of the DSpaceObject.
* @param handle Handle of the DSpaceObject.
* @throws SQLException
* @throws RDFMissingIdentifierException In case that no Identifier could be generated.
*/
public static void delete(Context ctx, int type, int id, String handle, String[] identifiers)
throws SQLException, RDFMissingIdentifierException
{
String uri = RDFConfiguration.getURIGenerator().generateIdentifier(ctx,
type, id, handle, identifiers);
if (uri != null)
{
RDFConfiguration.getRDFStorage().delete(uri);
} else {
throw new RDFMissingIdentifierException(type, id);
}
}
}
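
A minimal usage sketch for RDFUtil (not part of this commit): resolve a handle, convert and store the object, and print the result as Turtle. It assumes placement in the org.dspace.rdf package (so the package-private RDFMissingIdentifierException is visible) and an additional import of org.dspace.handle.HandleManager; the handle is a made-up example.

    public static void convertOne(Context context, String handle)
            throws SQLException, RDFMissingIdentifierException
    {
        try
        {
            // e.g. handle = "123456789/42" (illustrative only)
            DSpaceObject dso = HandleManager.resolveToObject(context, handle);
            if (dso == null)
            {
                System.err.println("Cannot resolve handle " + handle + ".");
                return;
            }
            Model model = RDFUtil.convertAndStore(context, dso);
            if (model != null)
            {
                // print the converted data as Turtle, then free the model.
                model.write(System.out, "TURTLE");
                model.close();
            }
        }
        catch (ItemNotArchivedException | ItemWithdrawnException
                | ItemNotDiscoverableException | AuthorizeException ex)
        {
            // the object is not public, so nothing was stored.
            System.err.println(handle + " was not converted: " + ex.getMessage());
        }
    }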

View File

@@ -0,0 +1,819 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
import com.hp.hpl.jena.rdf.model.Model;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.Site;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
* This class manages the handling of RDF data in DSpace. It generates
* identifiers, it loads data, it manages the conversion of DSpace Objects into
* RDF data. It can be used as instantiated object as well as CLI.
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class RDFizer {
private static final Logger log = Logger.getLogger(RDFizer.class);
protected ConfigurationService configurationService;
protected boolean stdout;
protected boolean verbose;
protected boolean dryrun;
protected String lang;
protected Context context;
/**
 * Set to remember which DSpaceObjects were already converted or deleted
 * from the triplestore. This set is helpful when converting or deleting
 * multiple DSpaceObjects (e.g. Communities with all their Subcommunities
 * and Items).
 */
protected Set<String> processed;
public RDFizer() throws SQLException
{
this.configurationService = new DSpace().getConfigurationService();
this.stdout = false;
this.verbose = false;
this.dryrun = false;
this.lang = "TURTLE";
this.processed = new CopyOnWriteArraySet<String>();
this.context = new Context(Context.READ_ONLY);
}
/**
* This method allows you to override the context used for conversion and
* for determining which DSpaceObjects should be deleted from the
* triplestore; consider well whether this is really necessary.
* If this method is not used, the context of an anonymous user will be
* used.
* <p>
* Please consider: if your triplestore offers a public SPARQL endpoint,
* all information readable with the provided context will be exposed to
* the public!
* If you store your data in a private triplestore that does not provide
* public access, you might consider using this method to convert all data
* stored in your repository.
* </p>
*
* @param context
*/
protected void overrideContext(Context context)
{
this.context = context;
}
/**
* Returns whether all converted data is printed to stdout. Turtle will be
* used as serialization.
* @return
*/
public boolean isStdout() {
return stdout;
}
/**
* Set this to true to print all generated data to stdout. The data will be
* stored as well, unless {@code dryrun} is set true. Turtle will be used
* as serialization.
* @param stdout
*/
public void setStdout(boolean stdout) {
this.stdout = stdout;
}
/**
* Returns whether verbose information is printed to System.err. Probably
* this is helpful for CLI only.
* @return
*/
public boolean isVerbose() {
return verbose;
}
/**
* Set this to true to print verbose information to System.err. Probably
* this is helpful for CLI only.
* @param verbose
*/
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
/**
* Returns whether this is a dry run. Probably this is helpful for CLI only.
* @return
*/
public boolean isDryrun() {
return dryrun;
}
/**
* Set this true to prevent any changes on the triple store. Probably this
* is helpful for CLI usage only.
* @param dryrun
*/
public void setDryrun(boolean dryrun) {
this.dryrun = dryrun;
}
/**
* Deletes all data stored in the triplestore (drops all named graphs and
* cleans the default graph).
*/
public void deleteAll()
{
report("Sending delete command to the triple store.");
if (!this.dryrun) RDFConfiguration.getRDFStorage().deleteAll();
report("Deleted all data from the triplestore.");
}
/**
* Delete the data about the DSpaceObject from the triplestore.
* All data about descendant Subcommunities, Collections and Items will be
* deleted as well.
*/
public void delete(DSpaceObject dso, boolean reset)
throws SQLException
{
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
throw new IllegalArgumentException(dso.getTypeText()
+ " is currently not supported as independent entity.");
}
if (dso.getType() == Constants.SITE)
{
// we don't need to iterate over all objects, use a shortcut:
this.deleteAll();
return;
}
Callback callback = new Callback() {
@Override
protected void callback(DSpaceObject dso)
throws SQLException
{
String identifier = RDFUtil.generateIdentifier(context, dso);
if (StringUtils.isEmpty(identifier))
{
System.err.println("Cannot determine RDF URI for "
+ dso.getTypeText() + " " + dso.getID() + "(handle "
+ dso.getHandle() + ")" + ", skipping. Please "
+ "delete it specifing the RDF URI.");
log.error("Cannot detgermine RDF URI for "
+ dso.getTypeText() + " " + dso.getID() + "(handle "
+ dso.getHandle() + ")" + ", skipping deletion.");
return;
}
report("Deleting Named Graph" + identifier);
if (!dryrun)
{
RDFConfiguration.getRDFStorage().delete(identifier);
}
}
};
this.dspaceDFS(dso, callback, false, reset);
}
/**
* Converts and stores all DSpaceObjects that are readable for an anonymous
* user.
*/
public void convertAll()
throws SQLException
{
report("Starting conversion of all DSpaceItems, this may take a while...");
this.convert(new Site(), true);
report("Conversion ended.");
}
protected void convert(DSpaceObject dso, boolean reset)
throws SQLException
{
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
throw new IllegalArgumentException(dso.getTypeText()
+ " is currently not supported as independent entity.");
}
Callback callback = new Callback() {
@Override
protected void callback(DSpaceObject dso)
throws SQLException
{
Model converted = null;
try
{
if (dryrun)
{
converted = RDFUtil.convert(context, dso);
} else {
converted = RDFUtil.convertAndStore(context, dso);
}
} catch (ItemNotArchivedException ex) {
if (!(dso instanceof Item)) throw new IllegalStateException(ex.getMessage(), ex);
report("Skipping conversion of Item " + dso.getID()
+ " (handle " + dso.getHandle() + "): Item is not "
+ "archived.");
return;
} catch (ItemWithdrawnException ex) {
if (!(dso instanceof Item)) throw new IllegalStateException(ex.getMessage(), ex);
report("Skipping conversion of Item " + dso.getID()
+ " (handle " + dso.getHandle() + "): Item is "
+ "withdrawn.");
return;
} catch (ItemNotDiscoverableException ex) {
if (!(dso instanceof Item)) throw new IllegalStateException(ex.getMessage(), ex);
report("Skipping conversion of Item " + dso.getID()
+ " (handle " + dso.getHandle() + "): Item is not "
+ "discoverable.");
return;
} catch (AuthorizeException ex) {
report("Skipping conversion of " + dso.getTypeText() + " "
+ dso.getID() + " (handle " + dso.getHandle() + ")"
+ ", not authorized: " + ex.getMessage());
return;
} catch (RDFMissingIdentifierException ex) {
String errormessage = "Skipping conversion of "
+ dso.getTypeText() + " " + dso.getID()
+ " (handle " + dso.getHandle() + ").";
log.error(errormessage, ex);
System.err.println(errormessage
+ " Error while converting: " + ex.getMessage());
return;
}
if (stdout) {
if (converted == null)
{
System.err.println("Conversion of " + dso.getTypeText()
+ " " + dso.getID() + " resulted in no data.");
} else {
converted.write(System.out, lang);
}
}
if (converted != null) converted.close();
}
};
this.dspaceDFS(dso, callback, true, reset);
}
protected void dspaceDFS(DSpaceObject dso, Callback callback, boolean check, boolean reset)
throws SQLException
{
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
throw new IllegalArgumentException(dso.getTypeText()
+ " is currently not supported as independent entity.");
}
if (reset)
{
this.processed.clear();
}
if (isProcessed(dso))
{
log.debug("Skipping processing of " + dso.getTypeText() + " "
+ dso.getID() + " (handle " + dso.getHandle()
+ "), already processed.");
return;
}
markProcessed(dso);
// This is useful to debug the depth-first search, but it is really noisy.
// log.debug("Processing " + dso.getTypeText() + " " + dso.getID()
//         + " (handle " + dso.getHandle() + ").");
// If this method is used for conversion, we should check whether we have
// the permission to read a DSO before converting all of its descendants
// (e.g. check the read permission on a community before converting all of
// its subcommunities and collections).
// Just skip objects with missing permissions and report them.
if (check)
{
try
{
RDFUtil.isPublic(context, dso);
} catch (ItemNotArchivedException ex) {
if (!(dso instanceof Item)) throw new IllegalStateException(ex.getMessage(), ex);
report("Skipping processing of Item " + dso.getID()
+ " (handle " + dso.getHandle() + "): Item is not "
+ "archived.");
return;
} catch (ItemWithdrawnException ex) {
if (!(dso instanceof Item)) throw new IllegalStateException(ex.getMessage(), ex);
report("Skipping processing of Item " + dso.getID()
+ " (handle " + dso.getHandle() + "): Item is "
+ "withdrawn.");
return;
} catch (ItemNotDiscoverableException ex) {
if (!(dso instanceof Item)) throw new IllegalStateException(ex.getMessage(), ex);
report("Skipping processing of Item " + dso.getID()
+ " (handle " + dso.getHandle() + "): Item is not "
+ "discoverable.");
return;
} catch (AuthorizeException ex) {
report("Skipping processing of " + dso.getTypeText() + " "
+ dso.getID() + " (handle " + dso.getHandle() + ")"
+ ", not authorized: " + ex.getMessage());
return;
}
}
if (dso instanceof Site)
{
Community[] communities = Community.findAllTop(context);
for (Community community : communities)
{
this.dspaceDFS(community, callback, check, false);
}
}
if (dso instanceof Community)
{
Community[] subcommunities = ((Community) dso).getSubcommunities();
for (Community sub : subcommunities)
{
this.dspaceDFS(sub, callback, check, false);
}
Collection[] collections = ((Community) dso).getCollections();
for (Collection collection : collections)
{
this.dspaceDFS(collection, callback, check, false);
}
}
if (dso instanceof Collection)
{
ItemIterator items = ((Collection) dso).getAllItems();
while (items.hasNext())
{
Item item = items.next();
this.dspaceDFS(item, callback, check, false);
item.decache();
}
}
// Currently Bundles and Bitstreams aren't supported as independent entities.
// They should be converted as part of an item, so we do not need to make
// the recursive call for them.
//
// if (dso instanceof Item)
// {
// Bundle[] bundles = ((Item) dso).getBundles();
// for (Bundle bundle : bundles)
// {
// this.dspaceDFS(bundle, callback, check, false);
// }
// }
//
// if (dso instanceof Bundle)
// {
// Bitstream[] bitstreams = ((Bundle) dso).getBitstreams();
// for (Bitstream bitstream : bitstreams)
// {
// this.dspaceDFS(bitstream, callback, check, false);
// }
// }
callback.callback(dso);
report("Processed " + dso.getTypeText() + " " + dso.getID()
+ " (handle " + dso.getHandle() + ").");
}
protected boolean isProcessed(DSpaceObject dso)
{
String key = Integer.toString(dso.getType()) + "/"
+ Integer.toString(dso.getID());
return this.processed.contains(key);
}
protected void markProcessed(DSpaceObject dso)
{
String key = Integer.toString(dso.getType()) + "/"
+ Integer.toString(dso.getID());
this.processed.add(key);
}
protected void report(String message)
{
if (this.verbose)
{
System.err.println(message);
}
log.debug(message);
}
protected void runCLI(String[] args)
{
// prepare CLI and parse arguments
Options options = createOptions();
CommandLineParser parser = new PosixParser();
CommandLine line = null;
try
{
line = parser.parse(options, args);
}
catch (ParseException ex)
{
usage(options);
System.err.println();
System.err.println(ex.getMessage());
log.fatal(ex);
System.exit(1);
}
String[] remainingArgs = line.getArgs();
if (remainingArgs.length > 0)
{
this.usage(options);
System.err.println();
StringBuilder builder = new StringBuilder(100);
for (String argument : remainingArgs)
{
if (builder.length() > 0) builder.append(", ");
builder.append(argument);
}
String argumentsLine = builder.toString().trim();
System.err.print("Cannot recognize the following argument");
if (remainingArgs.length >= 2) System.err.print("s");
System.err.println(": " + argumentsLine + ".");
System.exit(1);
}
// set member variables depending on CLI arguments.
if (line.hasOption("verbose"))
{
setVerbose(true);
}
if (line.hasOption("dry-run"))
{
setDryrun(true);
}
if (line.hasOption("stdout"))
{
setStdout(true);
}
// check mutual exclusive arguments
if (line.hasOption("delete") && line.hasOption("delete-all"))
{
usage(options);
System.err.println("\n\nYou cannot use the options --delete <handle> "
+ "and --delete-all together.");
System.exit(1);
}
if (line.hasOption("convert-all")
&& (line.hasOption("delete") || line.hasOption("delete-all")))
{
usage(options);
System.err.println("\n\nYou cannot use the option --convert-all "
+ "together with --delete or --delete-all.");
System.exit(1);
}
if (line.hasOption("identifiers")
&& (line.hasOption("delete") || line.hasOption("delete-all")))
{
usage(options);
System.err.println("\n\nYou cannot use the option --identifiers <handle> "
+ "together with --delete or --delete-all.");
System.exit(1);
}
if (line.hasOption("stdout")
&& (line.hasOption("delete") || line.hasOption("delete-all")))
{
usage(options);
System.err.println("\n\nYou cannot use the option --stdout together "
+ "with --delete or --deleta-all.");
System.exit(1);
}
// Run commands depending on CLI arguments.
// process help first to prevent further evaluation of given options.
if (line.hasOption('h'))
{
usage(options);
System.exit(0);
}
if (line.hasOption("delete"))
{
String[] identifiers = line.getOptionValues("delete");
for (String identifier : identifiers)
{
if (!StringUtils.startsWithIgnoreCase(identifier, "hdl:"))
{
if (!this.dryrun)
{
RDFConfiguration.getRDFStorage().delete(identifier);
}
if (this.verbose)
{
System.err.println("Deleted " + identifier + ".");
}
continue;
}
String handle = identifier.substring(4);
log.debug("Trying to resolve identifier " + handle + ".");
DSpaceObject dso = resolveHandle(handle);
if (dso == null) {
// resolveHandle reports problems and returns null in case
// of an error or an unresolvable handle.
// Don't report it a second time, just continue...
continue;
}
log.debug("Resolved identifier " + handle + " as "
+ dso.getTypeText() + " " + dso.getID());
try
{
this.delete(dso, true);
}
catch (SQLException ex)
{
log.error(ex);
System.err.println("A problem with the database connection "
+ "occured. Canceled pending actions.");
System.err.println(ex.getMessage());
ex.printStackTrace(System.err);
System.exit(1);
}
}
System.exit(0);
}
if (line.hasOption("delete-all"))
{
this.deleteAll();
System.exit(0);
}
if (line.hasOption("identifiers"))
{
String[] identifiers = line.getOptionValues("identifiers");
report("Starting conversion of specified DSpaceObjects...");
this.processed.clear();
for (String handle : identifiers)
{
log.debug("Trying to resolve identifier " + handle + ".");
DSpaceObject dso = resolveHandle(handle);
if (dso == null) {
// resolveHandle reports problems and returns null in case
// of an error or an unresolvable handle.
// Don't report it a second time, just continue...
continue;
}
try
{
this.convert(dso, false);
}
catch (SQLException ex)
{
log.error(ex);
System.err.println("A problem with the database connection "
+ "occured. Canceled pending actions.");
System.err.println(ex.getMessage());
ex.printStackTrace(System.err);
System.exit(1);
}
}
report("Conversion ended.");
System.exit(0);
}
if (line.hasOption("convert-all"))
{
try {
this.convertAll();
}
catch (SQLException ex)
{
log.error(ex);
System.err.println("A problem with the database connection "
+ "occured. Canceled pending actions.");
System.err.println(ex.getMessage());
ex.printStackTrace(System.err);
System.exit(1);
}
System.exit(0);
}
this.usage(options);
System.exit(0);
}
protected DSpaceObject resolveHandle(String handle)
{
DSpaceObject dso = null;
try
{
dso = HandleManager.resolveToObject(this.context, handle);
}
catch (SQLException ex)
{
log.error(ex);
System.err.println("A problem with the database connection "
+ "occured. Canceled pending actions.");
System.err.println(ex.getMessage());
ex.printStackTrace(System.err);
System.exit(1);
}
catch (IllegalStateException ex)
{
log.error(ex);
System.err.println("Cannot recognize identifier '"
+ handle + "', skipping.");
return null;
}
if (dso == null)
{
System.err.println("Cannot resolve identifier '" + handle
+ "', skipping.");
log.debug("Couldn't resolve identifier '" + handle
+ "', dso was null.");
return null;
}
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
System.err.println(dso.getTypeText() + " are currently not "
+ "supported as independent entities. Bundles and Bitstreams "
+ "should be processed as part of their item.");
return null;
}
return dso;
}
protected Options createOptions() {
Options options = new Options();
options.addOption("h", "help", false, "Print usage information and exit.");
options.addOption("v", "verbose", false, "Print verbose information to "
+ "stderr while converting data.");
options.addOption("n", "dry-run", false, "Don't store the converted "
+ "data in the triple store, don't delete data from the "
+ "triplestore. Make a dry run, simulation what would happen.");
options.addOption("o", "stdout", false, "Print all converted data to " +
"stdout using turtle as serialization.");
options.addOption("n", "dry-run", false, "Don't send any data or commands " +
"to the triplestore. Usefull for debugging or in conjunction " +
"with --stdout.");
options.addOption("c", "convert-all", false, "Convert all DSpace Objects" +
" that are readable for an anonymous user. This may take a long time" +
"depending on the number of stored communties, collections and " +
"items. Existing information in the triple store will be updated.");
Option optIdentifiers = OptionBuilder.withLongOpt("identifiers")
.hasArgs()
.withArgName("handle")
.withValueSeparator(' ')
.withDescription("Only convert these DSpace Objects. If you specify "
+ "a Community or Collection all of their Items will be "
+ "converted as well. Separate multiple identifiers with a "
+ "space.")
.create('i');
options.addOption(optIdentifiers);
Option optDelete = OptionBuilder.withLongOpt("delete")
.hasArgs()
.withArgName("hdl:handle | URI")
.withValueSeparator(' ')
.withDescription("Delete previously converted data. Specify "
+ "either the handle of a DSpaceObject in the format "
+ "'hdl:<handle>' or the URI used to identify the rdf "
+ "data in the triplestore. If you specify a Community, "
+ "Collection or Item by its handle all converted "
+ "information about attached Subcommunities, "
+ "Collections, Items, Bundles and Bitstreams will be "
+ "deleted as well. Separate multiple identifiers with "
+ "a space.")
.create();
options.addOption(optDelete);
Option optDeleteAll = OptionBuilder.withLongOpt("delete-all")
.withDescription("Delete all converted data from the triplestore.")
.create();
options.addOption(optDeleteAll);
return options;
}
protected static void usage(Options options)
{
String cliSyntax = "[dspace-bin]/bin/dspace rdfizer [OPTIONS...]";
String header = "";
String footer = "\nYou cannot use the options --convert-all, --identifiers " +
"or --stdout together with --delete or --delete-all.\n" +
"Please use at least one option out of --convert-all, --delete, " +
"--delete-all or --identifiers.\n";
PrintWriter err = new PrintWriter(System.err);
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.printHelp(err, 79, cliSyntax, header, options, 2, 2, footer);
err.flush();
// don't close PrintWriter err, as it would close System.err!
}
public static void main(String[] args)
{
// Get a context for an anonymous user.
// Don't switch off the authorization system! We'll export the converted
// data into a triple store that provides a public SPARQL endpoint, so
// all exported RDF data can be read by anonymous users.
// We won't change the database; a read-only context will assure this.
Context context = null;
try {
context = new Context(Context.READ_ONLY);
}
catch (SQLException sqle)
{
log.info("Caught SQLException: ", sqle);
System.err.println("Can't connect to database: " + sqle.getMessage());
context.abort();
System.exit(-1);
}
RDFizer myself = null;
try {
myself = new RDFizer();
} catch (SQLException ex) {
System.err.println("A problem with the database occured: "
+ ex.getMessage());
ex.printStackTrace(System.err);
log.error(ex);
context.abort();
System.exit(1);
}
myself.overrideContext(context);
myself.runCLI(args);
// we don't change anything in the database, so abort the context.
context.abort();
}
protected abstract class Callback
{
protected abstract void callback(DSpaceObject dso)
throws SQLException;
}
}
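// Illustrative invocations (not part of this changeset), based on the
// options defined above; "123456789/x" are example handles:
//
//   [dspace]/bin/dspace rdfizer --convert-all
//   [dspace]/bin/dspace rdfizer --identifiers 123456789/1 123456789/2
//   [dspace]/bin/dspace rdfizer --stdout --dry-run --identifiers 123456789/1
//   [dspace]/bin/dspace rdfizer --delete hdl:123456789/1
//   [dspace]/bin/dspace rdfizer --delete-all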

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.Model;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public interface ConverterPlugin {
public void setConfigurationService(ConfigurationService configurationService);
/**
* Convert the specified DSpaceObject or a part of it into RDF.
* @param context The relevant DSpace context. Implementations must check
*                the READ permission for the provided context before
*                converting any data!
* @param dso The DSpaceObject that should be converted.
* @return A Jena Model containing the generated RDF, or null if nothing
*         was converted.
*/
public Model convert(Context context, DSpaceObject dso)
throws SQLException, AuthorizeException;
/**
* Checks whether this plugin supports the given type of DSpaceObject.
* @param type Resource type as defined in org.dspace.core.Constants.
* @return A boolean indicating whether the requested type is supported by this plugin.
* @see org.dspace.core.Constants
*/
public boolean supports(int type);
}
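// A minimal sketch of an implementation (not part of this changeset). The
// vocabulary IRIs below are invented for illustration; real plugins must
// check READ permission (e.g. via RDFUtil) before converting any data.
class ExampleConverterPluginSketch implements ConverterPlugin
{
    private ConfigurationService configurationService;

    @Override
    public void setConfigurationService(ConfigurationService configurationService)
    {
        this.configurationService = configurationService;
    }

    @Override
    public Model convert(Context context, DSpaceObject dso)
            throws SQLException, AuthorizeException
    {
        Model m = com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
        // one invented triple: link the DSO to its handle
        m.add(m.createResource("http://example.org/dso/" + dso.getID()),
                m.createProperty("http://example.org/ns#handle"),
                dso.getHandle());
        return m;
    }

    @Override
    public boolean supports(int type)
    {
        return type == org.dspace.core.Constants.ITEM;
    }
}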

View File

@@ -0,0 +1,97 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Resource;
/**
* Schema for DSpace Metadata RDF Mappings.
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
* @see http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0
*/
public class DMRM {
/** <p>The RDF model that holds the vocabulary terms</p> */
private static Model m_model = ModelFactory.createDefaultModel();
/** <p>The namespace of the vocabulary as a string</p> */
public static final String NS = "http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0#";
/** <p>The namespace of the vocabulary as a string</p>
* @see #NS */
public static String getURI() {return NS;}
/** <p>The namespace of the vocabulary as a resource</p> */
public static final Resource NAMESPACE = m_model.createResource( NS );
/** <p>Represents the mapping of a DSpace metadata value to an RDF equivalent.</p> */
public static final Resource DSpaceMetadataRDFMapping = m_model.createResource(NS + "DSpaceMetadataRDFMapping");
/** <p>A reified statement that describes the result of the DSpaceMetadataRDFMapping.</p> */
public static final Resource Result = m_model.createResource(NS + "Result");
/** <p>Processes a metadata value into an RDF value or an IRI.</p> */
public static final Resource ValueProcessor = m_model.createResource(NS + "ValueProcessor");
/** <p>A regular expression to be used with java, composed of a matching and a replacement expression.</p> */
public static final Resource ValueModifier = m_model.createResource( NS + "ValueModifier" );
/** <p>Generates a literal depending on a DSpace metadata value.</p> */
public static final Resource LiteralGenerator = m_model.createResource(NS + "LiteralGenerator");
/** <p>Generates an IRI used for a rdfs:Resource depending on the converted DSpace Object and one of its metadata values.</p> */
public static final Resource ResourceGenerator = m_model.createResource(NS + "ResourceGenerator");
/** <p>Placeholder for the IRI of the DSpace Object that gets converted.</p> */
public static final Resource DSpaceObjectIRI = m_model.createResource( NS + "DSpaceObjectIRI" );
/** <p>Shortcut to generate a Literal containing an unchanged metadata value.</p> */
public static final Resource DSpaceValue = m_model.createResource(NS + "DSpaceValue");
/** <p>Specifies the RDF to generate for a specified metadata field.</p> */
public static final Property creates = m_model.createProperty( NS + "creates" );
/** <p>The subject of a DSpace metadata RDF mapping result.</p> */
public static final Property subject = m_model.createProperty( NS + "subject" );
/** <p>The predicate of a DSpace metadata RDF mapping result.</p> */
public static final Property predicate = m_model.createProperty( NS + "predicate" );
/** <p>The object of a DSpace metadata RDF mapping result.</p> */
public static final Property object = m_model.createProperty( NS + "object" );
/** <p>The name of the metadata field to convert (e.g. dc.title).</p> */
public static final Property metadataName = m_model.createProperty( NS + "metadataName" );
/** <p>A regex that the metadata value has to fulfill if the mapping should become active.</p> */
public static final Property condition = m_model.createProperty( NS + "condition" );
/** <p>Information how the metadata value should be modified before it is inserted in the pattern.</p> */
public static final Property modifier = m_model.createProperty( NS + "modifier" );
/** <p>A regex that matches those subsequences of a metadata value, that should be replaced.</p> */
public static final Property matcher = m_model.createProperty( NS + "matcher" );
/** <p>A regex that replaces previously matched subsequences of a metadata value.</p> */
public static final Property replacement = m_model.createProperty( NS + "replacement" );
/** <p>A pattern that contains $DSpaceValue as placeholder for the metadata value.</p> */
public static final Property pattern = m_model.createProperty( NS + "pattern" );
/** <p>Defines the datatype a generated literal gets.</p> */
public static final Property literalType = m_model.createProperty( NS + "literalType" );
/** <p>Defines the language a literal uses. May be overridden by #dspaceLanguageTag.</p> */
public static final Property literalLanguage = m_model.createProperty( NS + "literalLanguage" );
/** <p>Defines to use the language tag of a DSpace metadata value.</p> */
public static final Property dspaceLanguageTag = m_model.createProperty( NS + "dspaceLanguageTag");
}
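// An illustrative sketch (not part of this changeset): a mapping using this
// vocabulary could look like the following Turtle; the dcterms predicate is
// an example choice, not mandated by the schema.
//
// @prefix dm: <http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0#> .
// @prefix dcterms: <http://purl.org/dc/terms/> .
//
// [] a dm:DSpaceMetadataRDFMapping ;
//    dm:metadataName "dc.title" ;
//    dm:creates [ dm:subject dm:DSpaceObjectIRI ;
//                 dm:predicate dcterms:title ;
//                 dm:object dm:DSpaceValue ] .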

View File

@@ -0,0 +1,285 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.InfModel;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.reasoner.Reasoner;
import com.hp.hpl.jena.reasoner.ReasonerRegistry;
import com.hp.hpl.jena.reasoner.ValidityReport;
import com.hp.hpl.jena.util.FileManager;
import com.hp.hpl.jena.util.FileUtils;
import com.hp.hpl.jena.vocabulary.RDF;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.MetadataExposure;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DCValue;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.ConfigurationService;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class MetadataConverterPlugin implements ConverterPlugin
{
public final static String METADATA_MAPPING_PATH_KEY = "rdf.metadata.mappings";
public final static String METADATA_SCHEMA_URL_KEY = "rdf.metadata.schema";
public final static String METADATA_PREFIXES_KEY = "rdf.metadata.prefixes";
private final static Logger log = Logger.getLogger(MetadataConverterPlugin.class);
protected ConfigurationService configurationService;
@Override
public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
@Override
public Model convert(Context context, DSpaceObject dso)
throws SQLException, AuthorizeException {
String uri = RDFUtil.generateIdentifier(context, dso);
if (uri == null)
{
log.error("Cannot create URI for " + dso.getTypeText() + " "
+ dso.getID() + " stopping conversion.");
return null;
}
Model convertedData = ModelFactory.createDefaultModel();
String prefixesPath = configurationService.getProperty(METADATA_PREFIXES_KEY);
if (!StringUtils.isEmpty(prefixesPath))
{
InputStream is = FileManager.get().open(prefixesPath);
if (is == null)
{
log.warn("Cannot find file '" + prefixesPath + "', ignoring...");
} else {
convertedData.read(is, null, FileUtils.guessLang(prefixesPath));
try {
is.close();
}
catch (IOException ex)
{
// nothing to do here.
}
}
}
Model config = loadConfiguration();
if (config == null)
{
log.error("Cannot load MetadataConverterPlugin configuration, "
+ "skipping this plugin.");
return null;
}
/*
if (log.isDebugEnabled())
{
StringWriter sw = new StringWriter();
sw.append("Inferenced the following model:\n");
config.write(sw, "TURTLE");
sw.append("\n");
log.debug(sw.toString());
try {
sw.close();
} catch (IOException ex) {
// nothing to do here
}
}
*/
ResIterator mappingIter =
config.listSubjectsWithProperty(RDF.type, DMRM.DSpaceMetadataRDFMapping);
if (!mappingIter.hasNext())
{
log.warn("No metadata mappings found, returning null.");
return null;
}
List<MetadataRDFMapping> mappings = new ArrayList<>();
while (mappingIter.hasNext())
{
MetadataRDFMapping mapping = MetadataRDFMapping.getMetadataRDFMapping(
mappingIter.nextResource(), uri);
if (mapping != null) mappings.add(mapping);
}
// This should be changed if Communities and Collections get metadata as well.
if (!(dso instanceof Item))
{
log.error("This DspaceObject (" + dso.getTypeText() + " "
+ dso.getID() + ") should not have bin submitted to this "
+ "plugin, as it supports Items only!");
return null;
}
Item item = (Item) dso;
DCValue[] metadata_values = item.getDC(Item.ANY, Item.ANY, Item.ANY);
for (DCValue value : metadata_values)
{
String fieldname = value.schema + "." + value.element;
if (value.qualifier != null)
{
fieldname = fieldname + "." + value.qualifier;
}
if (MetadataExposure.isHidden(context, value.schema, value.element,
value.qualifier))
{
log.debug(fieldname + " is a hidden metadata field, won't "
+ "convert it.");
continue;
}
boolean converted = false;
if (value.qualifier != null)
{
Iterator<MetadataRDFMapping> iter = mappings.iterator();
while (iter.hasNext())
{
MetadataRDFMapping mapping = iter.next();
if (mapping.matchesName(fieldname) && mapping.fulfills(value.value))
{
mapping.convert(value.value, value.language, uri, convertedData);
converted = true;
}
}
}
if (!converted)
{
String name = value.schema + "." + value.element;
Iterator<MetadataRDFMapping> iter = mappings.iterator();
while (iter.hasNext() && !converted)
{
MetadataRDFMapping mapping = iter.next();
if (mapping.matchesName(name) && mapping.fulfills(value.value))
{
mapping.convert(value.value, value.language, uri, convertedData);
converted = true;
}
}
}
if (!converted)
{
log.debug("Did not convert " + fieldname + ". Found no "
+ "corresponding mapping.");
}
}
config.close();
if (convertedData.isEmpty())
{
convertedData.close();
return null;
}
return convertedData;
}
@Override
public boolean supports(int type) {
// This should be changed if Communities and Collections get metadata as well.
return (type == Constants.ITEM);
}
protected Model loadConfiguration()
{
String mappingPaths = configurationService.getProperty(METADATA_MAPPING_PATH_KEY);
if (StringUtils.isEmpty(mappingPaths))
{
return null;
}
String[] mappings = mappingPaths.split(",\\s*");
if (mappings == null || mappings.length == 0)
{
log.error("Cannot find metadata mappings (looking for "
+ "property " + METADATA_MAPPING_PATH_KEY + ")!");
return null;
}
InputStream is = null;
Model config = ModelFactory.createDefaultModel();
for (String mappingPath : mappings)
{
is = FileManager.get().open(mappingPath);
if (is == null)
{
log.warn("Cannot find file '" + mappingPath + "', ignoring...");
continue;
}
config.read(is, "file://" + mappingPath, FileUtils.guessLang(mappingPath));
try {
is.close();
}
catch (IOException ex)
{
// nothing to do here.
}
}
if (config.isEmpty())
{
config.close();
log.warn("Metadata RDF Mapping did not contain any triples!");
return null;
}
String schemaURL = configurationService.getProperty(METADATA_SCHEMA_URL_KEY);
if (schemaURL == null)
{
log.error("Cannot find metadata rdf mapping schema (looking for "
+ "property " + METADATA_SCHEMA_URL_KEY + ")!");
}
if (!StringUtils.isEmpty(schemaURL))
{
log.debug("Going to inference over the rdf metadata mapping.");
// Inferencing over the configuration data let us detect some rdf:type
// properties out of rdfs:domain and rdfs:range properties
// A simple rdfs reasoner is enough for this task.
Model schema = ModelFactory.createDefaultModel();
schema.read(schemaURL);
Reasoner reasoner = ReasonerRegistry.getRDFSSimpleReasoner().bindSchema(schema);
InfModel inf = ModelFactory.createInfModel(reasoner, config);
// If we do inferencing, we can easily check for consistency.
ValidityReport reports = inf.validate();
if (!reports.isValid())
{
StringBuilder sb = new StringBuilder();
sb.append("The configuration of the MetadataConverterPlugin is ");
sb.append("not valid regarding the schema (");
sb.append(DMRM.getURI());
sb.append(").\nThe following problems were encountered:\n");
for (Iterator<ValidityReport.Report> iter = reports.getReports();
iter.hasNext() ; )
{
ValidityReport.Report report = iter.next();
if (report.isError)
{
sb.append(" - " + report + "\n");
}
}
log.error(sb.toString());
return null;
}
return inf;
}
return config;
}
}
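// A hedged configuration sketch (not part of this changeset): the plugin
// reads the three properties defined above; the paths below are invented
// example values.
//
// rdf.metadata.mappings = /dspace/config/modules/rdf/metadata-rdf-mapping.ttl
// rdf.metadata.schema = file:///dspace/config/modules/rdf/metadata-rdf-schema.ttl
// rdf.metadata.prefixes = /dspace/config/modules/rdf/metadata-prefixes.ttl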

View File

@@ -0,0 +1,31 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class MetadataMappingException extends Exception {
public MetadataMappingException(String msg)
{
super(msg);
}
public MetadataMappingException(Exception cause)
{
super(cause);
}
public MetadataMappingException(String msg, Exception cause)
{
super(msg, cause);
}
}

View File

@@ -0,0 +1,649 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class MetadataRDFMapping {
private static final Logger log = Logger.getLogger(MetadataRDFMapping.class);
protected final String name;
protected final Pattern fulfills;
protected final List<Resource> results;
protected MetadataRDFMapping(String name, Pattern fulfills, List<Resource> results)
{
this.name = name;
this.fulfills = fulfills;
this.results = results;
}
public static MetadataRDFMapping getMetadataRDFMapping(
Resource mappingResource, String dsoIdentifier)
{
// For better log messages: try to get the URI of this mapping.
String uri = "";
if (mappingResource.getURI() != null)
{
uri = " (" + mappingResource.getURI() + ")";
}
if (log.isDebugEnabled())
{
if (uri.equals(""))
{
log.debug("Processing blank node MetadataRDFMapping.");
}
else
{
log.debug("Processing MetadataRDFMapping" + uri + ".");
}
}
// Parse the property DMRM.metadataName
RDFNode nameNode;
try
{
nameNode = getSingularProperty(mappingResource, DMRM.metadataName);
}
catch (IllegalArgumentException ex)
{
log.error("The Property 'metadataName' exists multiple times in one "
+ "DSpaceMetadataRDFMapping, ignoring it" + uri + ".");
return null;
}
if (nameNode == null)
{
log.error("Cannot find property 'metadataName', ignoring mapping" + uri + ".");
return null;
}
if (!nameNode.isLiteral())
{
log.error("Property 'metadataName' is not a literal, ignoring mapping"
+ uri + ".");
return null;
}
String name = nameNode.asLiteral().getLexicalForm();
log.debug("Found mapping name '" + name + "'.");
// Parse the property condition, if it exists.
RDFNode conditionNode;
try
{
conditionNode = getSingularProperty(mappingResource, DMRM.condition);
}
catch (IllegalArgumentException ex)
{
log.error("There are multiple properties 'condition' in one "
+ "DSpaceMetadataRDFMapping, ignoring it" + uri + ".");
return null;
}
String regex = null;
Pattern condition = null;
if (conditionNode != null)
{
if (conditionNode.isLiteral())
{
regex = conditionNode.asLiteral().getLexicalForm();
log.debug("Found property condition '" + regex + "'.");
} else {
log.error("Property 'condition' is not a literal, ignoring "
+ "mapping" + uri + ".");
return null;
}
} else {
// there is no property "condition". As this property is optional
// there is nothing to be done here.
log.debug("Didn't find a property \"condition\".");
}
if (regex != null)
{
try
{
condition = Pattern.compile(regex);
}
catch (PatternSyntaxException ex)
{
log.error("Property 'condition' does not specify a valid java "
+ "regex pattern. Will ignore mapping" + uri + ".", ex);
return null;
}
}
// parse all properties DMRM.creates.
List<Resource> results = new ArrayList<>();
StmtIterator mappingIter = mappingResource.listProperties(DMRM.creates);
if (!mappingIter.hasNext())
{
log.warn("No 'creates' property in a DSpaceMetadataRDFMapping, "
+ "ignonring it" + uri + ".");
return null;
}
while (mappingIter.hasNext())
{
RDFNode result = mappingIter.nextStatement().getObject();
if (!result.isResource())
{
log.error("Mapping result" + uri + " is a Literal not a resource. "
+ "Ignoring mapping.");
return null;
}
results.add(result.asResource());
}
// create mapping
return new MetadataRDFMapping(name, condition, results);
}
public boolean matchesName(String name)
{
return StringUtils.equalsIgnoreCase(this.name, name);
}
public boolean fulfills(String value)
{
// if fulfills exists, we have to check the field value
if (this.fulfills == null)
{
return true;
}
if (!this.fulfills.matcher(value).matches())
{
log.debug("Value '" + value + "' does not match regex '" + fulfills.toString() + "'.");
return false;
} else {
return true;
}
}
public void convert(String value, String lang, String dsoIRI, Model m)
{
log.debug("Using convertion for field " + name + " on value: " + value
+ " for " + dsoIRI + ".");
// run over all results
for (Iterator<Resource> iter = this.results.iterator() ; iter.hasNext() ; )
{
try {
compileResult(m, iter.next(), dsoIRI, name, value, lang);
} catch (MetadataMappingException ex) {
log.error(ex.getMessage() + " Will ignore this mapping result.");
}
}
}
protected void compileResult(Model m, Resource result,
String dsoIRI, String name, String value, String lang) throws MetadataMappingException
{
// for better debug messages.
String uri = "";
if (result.isURIResource()) uri = " (" + result.getURI() + ")";
// check the subject
RDFNode subjectNode;
try
{
subjectNode = getSingularProperty(result, DMRM.subject);
}
catch (IllegalArgumentException ex)
{
throw new MetadataMappingException("There are multiple 'subject' "
+ "properties in a mapping result" + uri + ".");
}
if (subjectNode == null)
{
throw new MetadataMappingException("Mapping result" + uri
+ " does not have a subject.");
}
if (!subjectNode.isResource())
{
throw new MetadataMappingException("Subject of a result" + uri
+ " is a Literal not a URIResource.");
}
log.debug("Found subject: " + subjectNode.toString());
// check the predicate
RDFNode predicateNode;
try
{
predicateNode = getSingularProperty(result, DMRM.predicate);
}
catch (IllegalArgumentException ex)
{
throw new MetadataMappingException("There are multiple 'predicate' "
+ "properties in a mapping result" + uri + ".");
}
if (predicateNode == null)
{
throw new MetadataMappingException("Mapping result" + uri
+ " does not have a predicate.");
}
if (!predicateNode.isResource())
{
throw new MetadataMappingException("Predicate of a result" + uri
+ " is a Literal not a URIResource.");
}
log.debug("Found predicate: " + predicateNode.toString());
RDFNode objectNode;
try
{
objectNode = getSingularProperty(result, DMRM.object);
}
catch (IllegalArgumentException ex)
{
throw new MetadataMappingException("There are multiple 'object' "
+ "properties in a mapping result" + uri + ".");
}
if (objectNode == null)
{
throw new MetadataMappingException("Mapping result" + uri
+ " does not have a object.");
}
log.debug("Found object: " + objectNode.toString());
Resource subject = parseSubject(m, subjectNode.asResource(),
dsoIRI, name, value);
if (subject == null)
{
throw new MetadataMappingException("Cannot parse subject of a "
+ "reified statement " + uri + ".");
}
Property predicate = parsePredicate(m, predicateNode.asResource(),
dsoIRI, name, value);
if (predicate == null)
{
throw new MetadataMappingException("Cannot parse predicate of a "
+ "reified statement " + uri + ".");
}
RDFNode object = parseObject(m, objectNode, dsoIRI, name, value, lang);
if (object == null)
{
throw new MetadataMappingException("Cannot parse object of a "
+ "reified statement " + uri + ".");
}
m.add(subject, predicate, object);
}
protected Resource parseSubject(Model m, Resource subject, String dsoIRI,
String name, String value)
{
if (subject.hasProperty(RDF.type, DMRM.ResourceGenerator))
{
String generatedIRI = parseResourceGenerator(subject, value, dsoIRI);
if (generatedIRI == null)
{
log.debug("Generated subject IRI is null.");
return null;
}
log.debug("Subject ResourceGenerator generated '" + generatedIRI + "'.");
return m.createResource(generatedIRI);
}
return subject;
}
protected Property parsePredicate(Model m, Resource predicate, String dsoIRI,
String name, String value)
{
if (predicate.hasProperty(RDF.type, DMRM.ResourceGenerator))
{
String generatedIRI = parseResourceGenerator(predicate, value, dsoIRI);
if (generatedIRI == null)
{
log.debug("Generated predicate IRI is null.");
return null;
}
log.debug("Property ResourceGenerator generated '" + generatedIRI + "'.");
return m.createProperty(generatedIRI);
}
String uri = predicate.getURI();
if (uri == null)
{
log.debug("A result predicate is blank node, but not a "
+ "ResourceGenerator. Ingoring this result.");
return null;
}
return m.createProperty(uri);
}
protected RDFNode parseObject(Model m, RDFNode objectNode, String dsoIRI,
String name, String value, String lang)
{
if (objectNode.isLiteral()) return objectNode;
Resource object = objectNode.asResource();
if (object.hasProperty(RDF.type, DMRM.LiteralGenerator))
{
Literal literalValue = parseLiteralGenerator(m, object, value, lang);
if (literalValue == null) return null;
return literalValue;
}
if (object.hasProperty(RDF.type, DMRM.ResourceGenerator))
{
String generatedIRI = parseResourceGenerator(object, value, dsoIRI);
if (generatedIRI == null)
{
log.debug("Generated object IRI is null.");
return null;
}
log.debug("Object ResourceGenerator generated '" + generatedIRI + "'.");
return m.createResource(generatedIRI);
}
if (object.isAnon())
{
Resource blank = m.createResource();
StmtIterator iter = object.listProperties();
while (iter.hasNext())
{
Statement stmt = iter.nextStatement();
Property predicate = stmt.getPredicate();
// recurse over the object of a blank node.
blank.addProperty(predicate,
parseObject(m, stmt.getObject(), dsoIRI, name, value, lang));
}
return blank;
}
// The object is not a literal, not a blank node, and neither a
// ResourceGenerator nor a LiteralGenerator => it must be a plain
// Resource => use it as it is.
return object;
}
protected String parseResourceGenerator(Resource resourceGenerator,
String value, String dsoIRI)
{
if (resourceGenerator.isURIResource()
&& resourceGenerator.equals(DMRM.DSpaceObjectIRI))
{
return dsoIRI;
}
return parseValueProcessor(resourceGenerator, value);
}
protected Literal parseLiteralGenerator(Model m, Resource literalGenerator,
String value, String lang)
{
if (literalGenerator.isURIResource()
&& literalGenerator.equals(DMRM.DSpaceValue))
{
return m.createLiteral(value);
}
String modifiedValue = parseValueProcessor(literalGenerator, value);
if (modifiedValue == null) return null;
// check if we should produce a typed literal
// Per the RDF spec, language tags are not significant on typed literals,
// so we can ignore them if we have a typed literal.
try
{
RDFNode literalTypeNode = getSingularProperty(literalGenerator, DMRM.literalType);
if (literalTypeNode != null)
{
if (literalTypeNode.isURIResource())
{
return m.createTypedLiteral(modifiedValue,
literalTypeNode.asResource().getURI());
} else {
log.warn("A LiteralGenerator has a property 'literalType' that "
+ "either is a blank node or a Literal. Ignoring it.");
}
}
}
catch (IllegalArgumentException ex)
{
log.error("A LiteralGenerator has multiple properties "
+ "'literalType'. Will ignore them.");
}
// check if a language tag should be generated
String languageTag = null;
try
{
RDFNode langNode = getSingularProperty(literalGenerator, DMRM.literalLanguage);
if (langNode != null)
{
if (langNode.isLiteral())
{
languageTag = langNode.asLiteral().getLexicalForm();
} else {
log.warn("Found a property 'literalLanguage', but its "
+ "object is not a literal! Ignoring it.");
}
}
}
catch (IllegalArgumentException ex)
{
log.warn("A LiteralGenerator has multiple properties "
+ "'literalLanguage'. Will ignore them.");
}
try {
RDFNode dspaceLangNode = getSingularProperty(literalGenerator,
DMRM.dspaceLanguageTag);
if (dspaceLangNode != null)
{
boolean useDSpaceLang = false;
if (dspaceLangNode.isLiteral())
{
try {
useDSpaceLang = dspaceLangNode.asLiteral().getBoolean();
}
catch (Exception ex)
{
/*
* Nothing to do here.
*
* Catching a plain Exception is certainly not the best coding
* style, but it is the most practical option here, as Jena throws
* undeclared RuntimeExceptions if parsing the boolean fails.
*/
}
}
if (useDSpaceLang && !StringUtils.isEmpty(lang))
{
if (lang.indexOf("_") == 2)
{
languageTag = lang.replaceFirst("_", "-");
} else {
languageTag = lang;
}
}
}
} catch (IllegalArgumentException ex) {
log.error("A LiteralGenerator has multiple properties "
+ "'dspaceLanguageTag'. Will ignore them.");
}
if (languageTag != null) return m.createLiteral(modifiedValue, languageTag);
return m.createLiteral(modifiedValue);
}
protected String parseValueProcessor(Resource valueProcessor, String value)
{
// look if there's a modifier.
RDFNode modifierNode;
try
{
modifierNode = getSingularProperty(valueProcessor, DMRM.modifier);
}
catch (IllegalArgumentException ex)
{
log.error("The ResourceGenerator of a mapping result has "
+ "multiple 'modifier' properties, skipping this result.");
return null;
}
if (modifierNode != null)
{
// in case there is a modifier, find its matcher and its
// replacement and modify the value
if (!modifierNode.isResource())
{
log.error("The modifier of a result is a Literal not an Resource! "
+ "Ingoring this result.");
return null;
}
Resource modifier = modifierNode.asResource();
RDFNode matcherNode;
try
{
matcherNode = getSingularProperty(modifier, DMRM.matcher);
}
catch (IllegalArgumentException ex)
{
log.error("The modifier of a mapping result has multiple "
+ "'matcher' properties. Ignoring this result.");
return null;
}
if (matcherNode == null)
{
log.error("Found a modifier property to a result, but no "
+ "matcher property! Ignoring this result!");
return null;
}
if (!matcherNode.isLiteral())
{
log.error("A matcher of a result modifier is not a Literal! "
+ "Ignoring this result.");
return null;
}
// get the replacement string
RDFNode replacementNode;
try
{
replacementNode = getSingularProperty(modifier, DMRM.replacement);
}
catch (IllegalArgumentException ex)
{
log.error("The modifier of a mapping result has multiple "
+ "'replacement' properties. Ignoring this result.");
return null;
}
if (replacementNode == null)
{
log.error("Found a modifier property to a result, but no "
+ "replacement property! Ignoring this result!");
return null;
}
if (!replacementNode.isLiteral())
{
log.error("A replacement of a result modifier is not a Literal! "
+ "Ignoring this result.");
return null;
}
String matcher = matcherNode.asLiteral().getLexicalForm();
String replacement = replacementNode.asLiteral().getLexicalForm();
try
{
Pattern pattern = Pattern.compile(matcher);
String modifiedValue = pattern.matcher(value).replaceAll(replacement);
log.debug("Found matcher '" + matcher + "'.\n"
+ "Found replacement '" + replacement + "'.\n"
+ "modified '" + value + "' => '" + modifiedValue + "'.");
value = modifiedValue;
}
catch (PatternSyntaxException ex)
{
log.error("Property 'matcher' of a ValueModifider didn't specify a "
+ "valid java regex pattern. Will ignore this result.", ex);
return null;
}
}
// in case there is a modifier, we modified the value. Insert the
// (possibly modified) value in the pattern
RDFNode patternNode;
try
{
patternNode = getSingularProperty(valueProcessor, DMRM.pattern);
}
catch (IllegalArgumentException ex)
{
log.error("The ValueProcessor of a mapping result has "
+ "multiple 'pattern' properties, skipping this result.");
return null;
}
if (patternNode == null)
{
log.debug("Cannot find the property 'pattern' of a "
+ "ValueProcessor, will use \"$DSpaceValue\".");
patternNode = valueProcessor.getModel().createLiteral("$DSpaceValue");
}
if (!patternNode.isLiteral())
{
log.error("A 'pattern' property of a ValueProcessor is not a "
+ "Literal! Skipping this result.");
return null;
}
String pattern = patternNode.asLiteral().getLexicalForm();
String result = pattern.replace("$DSpaceValue", value);
log.debug("Found pattern " + pattern + ".\n"
+ "Created result: " + result);
return result;
}
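// An illustrative sketch (not part of this changeset) of a ValueProcessor in
// Turtle, using invented regex values: the modifier strips a "doi:" prefix
// from the metadata value before it replaces $DSpaceValue in the pattern.
//
// [] a dm:ResourceGenerator ;
//    dm:modifier [ dm:matcher "^doi:" ;
//                  dm:replacement "" ] ;
//    dm:pattern "http://dx.doi.org/$DSpaceValue" .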
protected static RDFNode getSingularProperty(Resource r, Property p)
throws IllegalArgumentException
{
List<Statement> stmts = r.listProperties(p).toList();
if (stmts.isEmpty())
{
return null;
}
if (stmts.size() > 1)
{
throw new IllegalArgumentException("Property '" + p.getURI()
+ "' exists multiple times.");
}
return stmts.get(0).getObject();
}
}

View File

@@ -0,0 +1,21 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.Model;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
public interface RDFConverter {
public Model convert(Context context, DSpaceObject dso)
throws SQLException, AuthorizeException;
}

View File

@@ -0,0 +1,121 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.sql.SQLException;
import java.util.List;
import java.util.ArrayList;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class RDFConverterImpl implements RDFConverter
{
protected ConfigurationService configurationService;
protected List<ConverterPlugin> plugins;
private static final Logger log = Logger.getLogger(RDFConverterImpl.class);
public RDFConverterImpl()
{
this.configurationService = new DSpace().getConfigurationService();
this.plugins = new ArrayList<ConverterPlugin>();
String pluginNames[] = RDFConfiguration.getConverterPlugins();
if (pluginNames == null || pluginNames.length == 0)
{
log.error("Cannot load RDF converter plugins!");
throw new RuntimeException("Cannot load rdf converter plugins!");
}
for (String plugin : pluginNames)
{
try
{
Class pluginClass = Class.forName(plugin);
ConverterPlugin pluginInstance =
(ConverterPlugin) pluginClass.newInstance();
pluginInstance.setConfigurationService(this.configurationService);
this.plugins.add(pluginInstance);
}
catch (ClassNotFoundException ex)
{
log.warn("Cannot load plugin '" + plugin
+ "': class not found!", ex);
// If we ignored a plugin, we would generate incomplete RDF data.
throw new RuntimeException(ex.getMessage(), ex);
}
catch (IllegalAccessException ex)
{
log.warn("Cannot load plugin '" + plugin
+ "': illegal access!", ex);
// If we ignored a plugin, we would generate incomplete RDF data.
throw new RuntimeException(ex.getMessage(), ex);
}
catch (InstantiationException ex)
{
log.warn("Cannot load plugin '" + plugin
+ "': cannot instantiate the module!", ex);
// If we ignored a plugin, we would generate incomplete RDF data.
throw new RuntimeException(ex.getMessage(), ex);
}
log.debug("Successfully loaded RDFConverterPlugin "
+ plugin + ".");
}
}
public List<ConverterPlugin> getConverterPlugins()
{
return this.plugins;
}
@Override
public Model convert(Context context, DSpaceObject dso)
throws SQLException, AuthorizeException
{
if (this.plugins.isEmpty())
{
log.warn("No RDFConverterPlugins were loaded, cannot convert any data!");
return null;
}
Model model = ModelFactory.createDefaultModel();
for (ConverterPlugin plugin : this.plugins)
{
if (plugin.supports(dso.getType()))
{
Model convertedData = plugin.convert(context, dso);
if (convertedData != null)
{
model.setNsPrefixes(convertedData);
model.add(convertedData);
convertedData.close();
}
}
}
if (model.isEmpty())
{
model.close();
return null;
} else {
return model;
}
}
}
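// A minimal usage sketch (not part of this changeset); "context" and "dso"
// are assumed to be an open Context and a readable DSpaceObject:
//
// RDFConverter converter = new RDFConverterImpl();
// Model rdf = converter.convert(context, dso);
// if (rdf != null)
// {
//     rdf.write(System.out, "TURTLE");
//     rdf.close();
// }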

View File

@@ -0,0 +1,607 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.util.FileManager;
import com.hp.hpl.jena.util.FileUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.LinkedList;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.Util;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.Site;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class SimpleDSORelationsConverterPlugin
implements ConverterPlugin
{
public static final String SIMPLE_RELATIONS_PREFIXES_KEY = "rdf.simplerelations.prefixes";
public static final String SIMPLE_RELATIONS_SITE2COMMUNITY_KEY = "rdf.simplerelations.site2community";
public static final String SIMPLE_RELATIONS_COMMUNITY2SITE_KEY = "rdf.simplerelations.community2site";
public static final String SIMPLE_RELATIONS_COMMUNITY2SUBCOMMUNITY_KEY= "rdf.simplerelations.community2subcommunity";
public static final String SIMPLE_RELATIONS_SUBCOMMUNITY2COMMUNITY_KEY= "rdf.simplerelations.subcommunity2community";
public static final String SIMPLE_RELATIONS_COMMUNITY2COLLECTION_KEY = "rdf.simplerelations.community2collection";
public static final String SIMPLE_RELATIONS_COLLECTION2COMMUNITY_KEY = "rdf.simplerelations.collection2community";
public static final String SIMPLE_RELATIONS_COLLECTION2ITEM_KEY = "rdf.simplerelations.collection2item";
public static final String SIMPLE_RELATIONS_ITEM2COLLECTION_KEY = "rdf.simplerelations.item2collection";
public static final String SIMPLE_RELATIONS_ITEM2BITSTREAM_KEY = "rdf.simplerelations.item2bitstream";
private static final Logger log = Logger.getLogger(SimpleDSORelationsConverterPlugin.class);
protected ConfigurationService configurationService;
protected String[] site2community;
protected String[] community2site;
protected String[] community2subcommunity;
protected String[] subcommunity2community;
protected String[] community2collection;
protected String[] collection2community;
protected String[] collection2item;
protected String[] item2collection;
protected String[] item2bitstream;
public SimpleDSORelationsConverterPlugin()
{
site2community = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_SITE2COMMUNITY_KEY);
community2site = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COMMUNITY2SITE_KEY);
community2subcommunity = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COMMUNITY2SUBCOMMUNITY_KEY);
subcommunity2community = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_SUBCOMMUNITY2COMMUNITY_KEY);
community2collection = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COMMUNITY2COLLECTION_KEY);
collection2community = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COLLECTION2COMMUNITY_KEY);
collection2item = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COLLECTION2ITEM_KEY);
item2collection = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_ITEM2COLLECTION_KEY);
item2bitstream = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_ITEM2BITSTREAM_KEY);
if (site2community == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between the repository "
+ "the repository (SITE) and the top communities.");
}
if (community2site == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "the top communities and the repository (SITE).");
}
if (community2subcommunity == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "communities and subcommunities.");
}
if (subcommunity2community == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "subcommunities and communities.");
}
if (community2collection == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "communities and collections.");
}
if (collection2community == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "collections and communities.");
}
if (collection2item == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "collections and items.");
}
if (item2collection == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "items and collections.");
}
if (item2bitstream == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "items and bitstreams.");
}
}
/**
* Loads the prefixes that should be used by the
* SimpleDSORelationsConverterPlugin. Please remember to close the model
* returned by this method.
* @return A model containing the content of the file used to configure the
* RDF-Prefixes that should be used by this plugin.
*/
protected Model getPrefixes()
{
Model m = ModelFactory.createDefaultModel();
String prefixesPath = configurationService
.getProperty(SIMPLE_RELATIONS_PREFIXES_KEY);
if (!StringUtils.isEmpty(prefixesPath))
{
InputStream is = FileManager.get().open(prefixesPath);
if (is == null)
{
log.warn("Cannot find file '" + prefixesPath + "', ignoring...");
} else {
m.read(is, null, FileUtils.guessLang(prefixesPath));
try {
is.close();
}
catch (IOException ex)
{
// nothing to do here.
}
}
} else {
log.warn("Configuration does not contain path to prefixes file for "
+ "SimpleDSORelationsConverterPlugin. Will proceed without "
+ "prefixes.");
}
return m;
}
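// An illustrative sketch (not part of this changeset): the prefixes file is
// plain RDF whose namespace prefixes get copied into the generated model,
// e.g. in Turtle (example namespaces only):
//
// @prefix dcterms: <http://purl.org/dc/terms/> .
// @prefix foaf: <http://xmlns.com/foaf/0.1/> .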
@Override
public void setConfigurationService(ConfigurationService configurationService)
{
this.configurationService = configurationService;
}
@Override
public Model convert(Context context, DSpaceObject dso)
throws SQLException
{
switch(dso.getType())
{
case (Constants.SITE) :
{
return convertSite(context, (Site) dso);
}
case (Constants.COMMUNITY) :
{
return convertCommunity(context, (Community) dso);
}
case (Constants.COLLECTION) :
{
return convertCollection(context, (Collection) dso);
}
case (Constants.ITEM) :
{
return convertItem(context, (Item) dso);
}
}
return null;
}
public Model convertSite(Context context, Site site)
throws SQLException
{
if (site2community == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from the repository (SITE) to the top level "
+ "communities is disabled. Won't link from the repostitory "
+ "(SITE) to the top level communities.");
return null;
}
Model m = ModelFactory.createDefaultModel();
Model prefixes = this.getPrefixes();
m.setNsPrefixes(prefixes);
prefixes.close();
String myId = RDFUtil.generateIdentifier(context, site);
if (myId == null)
{
return null;
}
Community[] topLevelCommunities = Community.findAllTop(context);
for (Community community : topLevelCommunities)
{
if (!RDFUtil.isPublicBoolean(context, community))
{
continue;
}
String id = RDFUtil.generateIdentifier(context, community);
if (id == null)
{
continue;
}
for (String link : site2community)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
if (m.isEmpty())
{
log.info("There were no public sub communities we could link to.");
m.close();
return null;
}
return m;
}
public Model convertCommunity(Context context, Community community)
throws SQLException
{
if (community2site == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from the top level communities to the repository "
+ "(SITE) is disabled. Won't link from the top level "
+ "communities to the repository (SITE).");
}
if (community2subcommunity == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from communities to subcommunities was disabled. "
+ "Won't link from communities to subcommunities.");
}
if (subcommunity2community == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from subcommunities to communities was disabled. "
+ "Won't link from subcommunities to communities.");
}
if (community2collection == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from communities to collections was disabled. "
+ "Won't link from communities to collections.");
}
}
if (community2site == null && community2subcommunity == null
&& subcommunity2community == null && community2collection == null)
{
return null;
}
Model m = ModelFactory.createDefaultModel();
Model prefixes = this.getPrefixes();
m.setNsPrefixes(prefixes);
prefixes.close();
String myId = RDFUtil.generateIdentifier(context, community);
if (myId == null)
{
return null;
}
// add all parents
DSpaceObject[] parents = community.getAllParents();
// check whether this is a top level community
if (parents.length == 0)
{
parents = new DSpaceObject[] {Site.find(context, Site.SITE_ID)};
}
for (DSpaceObject parent : parents)
{
if (!RDFUtil.isPublicBoolean(context, parent))
{
continue;
}
String id = RDFUtil.generateIdentifier(context, parent);
if (id != null)
{
if (parent instanceof Site && community2site != null)
{
for (String link : community2site)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
else if (parent instanceof Community && subcommunity2community != null)
{
for (String link : subcommunity2community)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
}
}
// add all subcommunities
for (Community sub : community.getSubcommunities())
{
if (!RDFUtil.isPublicBoolean(context, sub))
{
continue;
}
String id = RDFUtil.generateIdentifier(context, sub);
if (id == null || community2subcommunity == null)
{
continue;
}
for (String link : community2subcommunity)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
// add all collections.
for (Collection col : community.getAllCollections())
{
if (!RDFUtil.isPublicBoolean(context, col))
{
continue;
}
String id = RDFUtil.generateIdentifier(context, col);
if (id == null || community2collection == null)
{
continue;
}
for (String link : community2collection)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
if (m.isEmpty())
{
m.close();
return null;
}
return m;
}
public Model convertCollection(Context context, Collection collection)
throws SQLException
{
if (collection2community == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from collections to communities was disabled. "
+ "Won't link from collections to communities.");
}
if (collection2item == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from collections to items was disabled. "
+ "Won't link from collections to items.");
}
if (collection2community == null && collection2item == null)
{
return null;
}
Model m = ModelFactory.createDefaultModel();
Model prefixes = this.getPrefixes();
m.setNsPrefixes(prefixes);
prefixes.close();
String myId = RDFUtil.generateIdentifier(context, collection);
if (myId == null)
{
return null;
}
// add all parents
DSpaceObject[] parents = collection.getCommunities();
for (DSpaceObject parent : parents)
{
if (!RDFUtil.isPublicBoolean(context, parent))
{
continue;
}
String id = RDFUtil.generateIdentifier(context, parent);
if (id != null && collection2community != null)
{
for (String link : collection2community)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
}
// add all items
ItemIterator items = collection.getAllItems();
while (items.hasNext())
{
String id = RDFUtil.generateIdentifier(context, items.next());
if (id != null && collection2item != null)
{
for (String link : collection2item)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
}
if (m.isEmpty())
{
m.close();
return null;
}
return m;
}
public Model convertItem(Context context, Item item)
throws SQLException
{
if (item2collection == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from items to collections was disabled. "
+ "Won't link from items to collections.");
}
if (item2bitstream == null)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from items to bitstreams was disabled. "
+ "Won't link from items to bitstreams.");
}
if (item2collection == null && item2bitstream == null)
{
return null;
}
Model m = ModelFactory.createDefaultModel();
Model prefixes = this.getPrefixes();
m.setNsPrefixes(prefixes);
prefixes.close();
String myId = RDFUtil.generateIdentifier(context, item);
if (myId == null)
{
return null;
}
// add all parents
Collection[] collections = item.getCollections();
for (DSpaceObject parent : collections)
{
if (!RDFUtil.isPublicBoolean(context, parent))
{
continue;
}
String id = RDFUtil.generateIdentifier(context, parent);
if (id != null && item2collection != null)
{
for (String link : item2collection)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(id));
}
}
}
// add all bitstreams of the ORIGINAL bundle
for(Bundle bundle : item.getBundles())
{
// currently link only the original files
// TODO: Discuss whether LICENSEs, THUMBNAILs and/or extracted TEXTs
// should be linked/exported as well (and whether such a feature
// should be configurable).
if (bundle.getName().equals("ORIGINAL"))
{
for (Bitstream bs : bundle.getBitstreams())
{
if (RDFUtil.isPublicBoolean(context, bs))
{
String url = bitstreamURI(bs);
if (url != null && item2bitstream != null)
{
for (String link : item2bitstream)
{
m.add(m.createResource(myId),
m.createProperty(link),
m.createResource(url));
}
}
}
}
}
}
if (m.isEmpty())
{
m.close();
return null;
}
return m;
}
/**
* This method generates a link to the provided Bitstream.
* As bitstreams currently don't get Persistent Identifiers in DSpace, we
* have to link them using a repository URL. This link should work with
* JSPUI and XMLUI (at least it does in DSpace 4.x).
* @param bitstream Bitstream for which a URL should be generated.
* @return The URL of the bitstream, or null if the Bitstream is a
*         Community or Collection logo.
* @throws SQLException
*/
public String bitstreamURI(Bitstream bitstream)
throws SQLException
{
DSpaceObject parent = bitstream.getParentObject();
if (!(parent instanceof Item))
{
// Bitstream is a community or collection logo.
// we currently ignore those
return null;
}
String dspaceURL = configurationService.getProperty("dspace.url");
String link = "";
try
{
// this currently (DSpace 4.1) works with xmlui and jspui.
link = dspaceURL + "/bitstream/" + parent.getHandle() + "/"
+ bitstream.getSequenceID() + "/"
+ Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING);
}
catch (UnsupportedEncodingException ex)
{
throw new RuntimeException("DSpace's default encoding is not supported.", ex);
}
return link;
}
@Override
public boolean supports(int type)
{
switch (type)
{
case (Constants.COLLECTION) :
return true;
case (Constants.COMMUNITY) :
return true;
case (Constants.ITEM) :
return true;
case (Constants.SITE) :
return true;
default :
return false;
}
}
}
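// A hedged configuration sketch (not part of this changeset): each relation
// property takes one or more predicate IRIs; the dcterms predicates and the
// prefixes path are example values only.
//
// rdf.simplerelations.prefixes = /dspace/config/modules/rdf/simple-relations-prefixes.ttl
// rdf.simplerelations.collection2item = http://purl.org/dc/terms/hasPart
// rdf.simplerelations.item2collection = http://purl.org/dc/terms/isPartOf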

View File

@@ -0,0 +1,132 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.conversion;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.util.FileManager;
import com.hp.hpl.jena.util.FileUtils;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.ConfigurationService;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class StaticDSOConverterPlugin
implements ConverterPlugin
{
private static final Logger log = Logger.getLogger(StaticDSOConverterPlugin.class);
public static final String CONSTANT_DATA_FILENAME_KEY_PREFIX = "rdf.constant.data.";
public static final String CONSTANT_DATA_GENERAL_KEY_SUFFIX = "GENERAL";
protected ConfigurationService configurationService;
@Override
public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
@Override
public Model convert(Context context, DSpaceObject dso)
throws SQLException
{
// As we do not use data of any DSpaceObject, we do not have to check
// permissions here. We provide only static data out of configuration
// files.
Model general = this.readFile(CONSTANT_DATA_GENERAL_KEY_SUFFIX,
RDFUtil.generateIdentifier(context, dso));
Model typeSpecific = this.readFile(dso.getTypeText(),
RDFUtil.generateIdentifier(context, dso));
if (general == null)
return typeSpecific;
if (typeSpecific == null)
return general;
typeSpecific.setNsPrefixes(general);
typeSpecific.add(general);
general.close();
return typeSpecific;
}
protected Model readFile(String fileSuffix, String base)
{
String path = configurationService.getProperty(
CONSTANT_DATA_FILENAME_KEY_PREFIX + fileSuffix);
if (path == null)
{
log.error("Cannot find dspace-rdf configuration (looking for "
+ "property " + CONSTANT_DATA_FILENAME_KEY_PREFIX
+ fileSuffix + ")!");
throw new RuntimeException("Cannot find dspace-rdf configuration "
+ "(looking for property " +
CONSTANT_DATA_FILENAME_KEY_PREFIX + fileSuffix + ")!");
}
log.debug("Going to read static data from file '" + path + "'.");
InputStream is = null;
Model staticDataModel = null;
try {
is = FileManager.get().open(path);
if (is == null)
{
log.warn("StaticDSOConverterPlugin cannot find file '" + path
+ "', ignoring...");
return null;
}
staticDataModel = ModelFactory.createDefaultModel();
staticDataModel.read(is, base, FileUtils.guessLang(path));
} finally {
if (is != null)
{
try {
is.close();
}
catch (IOException ex)
{
// nothing to do here.
}
}
}
if (staticDataModel.isEmpty())
{
staticDataModel.close();
return null;
}
return staticDataModel;
}
@Override
public boolean supports(int type)
{
switch (type)
{
case (Constants.COLLECTION) :
return true;
case (Constants.COMMUNITY) :
return true;
case (Constants.ITEM) :
return true;
case (Constants.SITE) :
return true;
default :
return false;
}
}
}
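// A hedged configuration sketch (not part of this changeset): the key is
// built from the prefix above plus either "GENERAL" or dso.getTypeText()
// (e.g. "ITEM"); the paths below are invented example values.
//
// rdf.constant.data.GENERAL = /dspace/config/modules/rdf/constant-data-general.ttl
// rdf.constant.data.ITEM = /dspace/config/modules/rdf/constant-data-item.ttl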

View File

@@ -0,0 +1,209 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.negotiation;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class MediaRange
{
// defined in RFC 2616
public static final double DEFAULT_QVALUE = 1.0;
// RFC 2616 defines the syntax of the accept header using several patterns.
// The patterns are defined in sections 2.2, 3.6, 3.7, 3.9 and 14.1 of the RFC.
// SEPARATOR: ( ) < > @ , ; : \ " / [ ] ? = { } <space> <tabulator>
// The separators can be used as a character class inside square brackets.
// To be able to negate the class, the surrounding square brackets are not
// included in the string below.
public static final String separators = "()<>@,;:\\\\\"/\\[\\]?={} \\t";
// TOKEN: any US-ASCII character except CTLs and separators
public static final String token = "[\\040-\\0176" + "&&[^" + separators + "]]+";
// "\" followed by any US ASCII character (octets 0 - 177)
public static final String quotedPair = "(?:\\\\[\\00-\\0177])";
// any 8 bit sequence, except CTLs (00-037, 0177) and " (042) but including LWS
public static final String qdtext = "(?:[\\040\\041\\043-\\0176\\0178-\\0377]|"
+ "(?:\\r\\n)?[ \\t]+)";
// ( <"> *(qdtext | quoted-pair) <">
public static final String quotedString = "(?:\"(?:" + qdtext + "|" + quotedPair + ")*\")";
public static final String nonQualityParam = "(?:\\s*;\\s*(?!q\\s*=)(" + token + ")="
+ "(" + token + "|" + quotedString + ")" + ")";
public static final String qualityParam = "(?:;\\s*q\\s*=\\s*(0(?:\\.\\d{0,3})?|1(?:\\.0{0,3})?))";
// group 0 contains the whole matched media range
// group 1 contains the type
// group 2 contains the subtype
// group 3 contains all parameters before the quality parameter if any
// group 4 contains the name of the last parameter before the quality parameter if any
// group 5 contains the value of the last parameter before the quality parameter if any
// group 6 contains the quality value if any
// group 7 contains all parameters after the quality parameter if any
// group 8 contains the name of the last parameter after the quality parameter if any
// group 9 contains the value of the last parameter after the quality parameter if any
public static final String mediaRangeRegex = "(?:(" + token + ")/(" + token + "?)"
+ "(" + nonQualityParam + "*)" + qualityParam + "?(" + nonQualityParam + "*))";
private final static Logger log = Logger.getLogger(MediaRange.class);
protected final String type;
protected final String subtype;
protected final double qvalue;
// would be good to take a Map for the parameters, but if we get multiple
// parameters with the same name, we would have a problem.
protected final List<String> parameterNames;
protected final List<String> parameterValues;
private MediaRange() {
throw new RuntimeException("Default constructor of MediaRange must "
+ "not be called. Use the constructor taking a media range "
+ "string instead.");
}
public MediaRange(String mediarange)
throws IllegalArgumentException, IllegalStateException
{
Pattern mediaRangePattern = Pattern.compile("^" + mediaRangeRegex + "$");
Pattern nonQualityParamPattern = Pattern.compile(nonQualityParam);
Pattern qualityParamPattern = Pattern.compile(qualityParam);
Matcher rangeMatcher = mediaRangePattern.matcher(mediarange.trim());
if (!rangeMatcher.matches())
{
log.warn("Provided media range ('" + mediarange.trim() + "') "
+ "does not comply with RFC 2616.");
throw new IllegalArgumentException("Provided media range ('"
+ mediarange + "') does not comply with RFC 2616.");
}
String type = rangeMatcher.group(1);
String subtype = rangeMatcher.group(2);
if (StringUtils.isEmpty(type) || StringUtils.isEmpty(subtype))
{
throw new IllegalArgumentException("A media range had an unparsable type or subtype.");
}
type = type.trim().toLowerCase();
subtype = subtype.trim().toLowerCase();
if (type.equals("*") && !subtype.equals("*"))
{
throw new IllegalArgumentException("A media range's type cannot "
+ "be wildcarded if its subtype isn't as well.");
}
// initialize with default value, parse later
double qvalue = DEFAULT_QVALUE;
// initialize empty lists, parse parameters later
List<String> parameterNames = new ArrayList<>();
List<String> parameterValues = new ArrayList<>();
// parse qvalue
if (!StringUtils.isEmpty(rangeMatcher.group(6)))
{
// parse provided quality value
try
{
qvalue = Double.parseDouble(rangeMatcher.group(6));
}
catch (NumberFormatException ex)
{
// the regex should ensure that the qvalue is parseable.
// if we get a NumberFormatException, we did something terribly
// wrong.
log.fatal("A quality value ('" + rangeMatcher.group(6) + "') "
+ "was unparsable. We probably have a problem with our "
+ "regex!", ex);
throw new IllegalStateException(ex);
}
}
// parse parameters
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(rangeMatcher.group(3)))
{
sb.append(rangeMatcher.group(3));
}
if (!StringUtils.isEmpty(rangeMatcher.group(7)))
{
sb.append(rangeMatcher.group(7));
}
if (sb.length() > 0)
{
String unparsedParameters = sb.toString();
Pattern paramPattern = Pattern.compile(nonQualityParam);
Matcher m = paramPattern.matcher(unparsedParameters);
// Matcher.matches() would consume the matcher and only accepts a
// single parameter, so validate the concatenated parameters with
// String.matches(...) and keep m fresh for the find() loop below.
if (!unparsedParameters.matches("(?:" + nonQualityParam + ")*"))
{
// the media range string matched our mediaRangeRegex, but the
// extracted parameters don't?!
log.fatal("Unable to parse the parameters ('"
+ unparsedParameters + "') of a previously parsed media "
+ "range!");
throw new IllegalStateException("Ran into problems while parsing "
+ "a substring of a previously successfully parsed string.");
}
while (m.find())
{
if (!StringUtils.isEmpty(m.group(1)))
{
parameterNames.add(m.group(1).trim().toLowerCase());
parameterValues.add(StringUtils.isEmpty(m.group(2)) ? "" : m.group(2).trim());
}
}
}
this.type = type;
this.subtype = subtype;
this.qvalue = qvalue;
this.parameterNames = parameterNames;
this.parameterValues = parameterValues;
}
public double getQvalue() {
return this.qvalue;
}
public String getType() {
return type;
}
public String getSubtype() {
return subtype;
}
public List<String> getParameterNames() {
return parameterNames;
}
public List<String> getParameterValues() {
return parameterValues;
}
public boolean typeIsWildcard()
{
return (StringUtils.equals(type, "*"));
}
public boolean subtypeIsWildcard()
{
return (StringUtils.equals(subtype, "*"));
}
}
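A minimal usage sketch of the class above (the media range strings are hypothetical):

MediaRange range = new MediaRange("text/html;level=1;q=0.8");
range.getType();           // "text"
range.getSubtype();        // "html"
range.getQvalue();         // 0.8
range.getParameterNames(); // ["level"]
new MediaRange("text/*").subtypeIsWildcard(); // true, qvalue defaults to 1.0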

View File

@@ -0,0 +1,123 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.negotiation;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.rdf.RDFConfiguration;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class NegotiationFilter implements Filter
{
public static final String ACCEPT_HEADER_NAME = "Accept";
private static final Logger log = Logger.getLogger(NegotiationFilter.class);
@Override
public void init(FilterConfig filterConfig) throws ServletException {
// nothing to do here.
}
@Override
public void doFilter(ServletRequest request, ServletResponse response,
FilterChain chain)
throws IOException, ServletException
{
try
{
if (!RDFConfiguration.isContentNegotiationEnabled())
{
chain.doFilter(request, response);
return;
}
}
catch (Exception ex)
{
log.warn("Will deliver HTML, as I cannot determine if content "
+ "negotiation should be enabled or not:\n"
+ ex.getMessage(), ex);
chain.doFilter(request, response);
return;
}
if (!(request instanceof HttpServletRequest)
|| !(response instanceof HttpServletResponse))
{
// just pass request and response to the next filter, if we don't
// have an HttpServletRequest.
chain.doFilter(request, response);
return;
}
// cast HttpServletRequest and HttpServletResponse
HttpServletRequest hrequest = (HttpServletRequest) request;
HttpServletResponse hresponse = (HttpServletResponse) response;
String acceptHeader = hrequest.getHeader(ACCEPT_HEADER_NAME);
String handle = null;
String extraPathInfo = null;
String path = hrequest.getPathInfo();
// guard against a null pathInfo before handing it to the matcher below
if (path == null) path = "";
// in JSPUI the pathInfo starts after /handle, in XMLUI it starts with /handle
Pattern handleCheckPattern = Pattern.compile("^/*handle/(.*)$");
Matcher handleCheckMatcher = handleCheckPattern.matcher(path);
if (handleCheckMatcher.matches())
{
// remove leading /handle
path = handleCheckMatcher.group(1);
}
// we expect the path to be in the form <prefix>/<suffix>/[<stuff>],
// where <prefix> is a handle prefix, <suffix> is the handle suffix
// and <stuff> may be further information.
log.debug("PathInfo: " + path);
if (path == null) path = "";
Pattern pathPattern =
Pattern.compile("^/*([^/]+)/+([^/]+)(?:/*||/+(.*))?$");
Matcher pathMatcher = pathPattern.matcher(path);
if (pathMatcher.matches())
{
handle = pathMatcher.group(1) + "/" + pathMatcher.group(2);
extraPathInfo = pathMatcher.group(3);
}
log.debug("handle: " + handle + "\n" + "extraPathInfo: " + extraPathInfo);
int requestedContent = Negotiator.negotiate(acceptHeader);
if (!Negotiator.sendRedirect(hresponse, handle, extraPathInfo,
requestedContent, false))
{
// as we do content negotiation, we should send a Vary header so
// browsers can adapt their caching strategy.
// the method Negotiator.sendRedirect does this only if it actually
// does the redirection itself.
hresponse.setHeader("Vary", "Accept");
// send html as default => no forwarding necessary
chain.doFilter(request, response);
}
}
@Override
public void destroy() {
// nothing to do here.
}
}
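For illustration, the flow this filter implements (handle, host and the /rdf context path are hypothetical examples; the context path comes from the rdf module configuration shown later in this diff):

// GET /xmlui/handle/123456789/42 with "Accept: text/turtle"
//   -> 303 See Other, Location: http://localhost:8080/rdf/handle/123456789/42/turtle
// GET /xmlui/handle/123456789/42 with "Accept: text/html"
//   -> the request passes the filter unchanged and the webui answers with
//      HTML, plus a "Vary: Accept" header as the answer depends on negotiation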

View File

@@ -0,0 +1,311 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.negotiation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Site;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class Negotiator {
// Serialization codes
public static final int UNSPECIFIED = -1;
public static final int WILDCARD = 0;
public static final int HTML = 1;
public static final int RDFXML = 2;
public static final int TURTLE = 3;
public static final int N3 = 4;
public static final String DEFAULT_LANG="html";
private static final Logger log = Logger.getLogger(Negotiator.class);
public static int negotiate(String acceptHeader)
{
if (acceptHeader == null) return UNSPECIFIED;
String[] mediaRangeSpecs = acceptHeader.split(",");
ArrayList<MediaRange> requestedMediaRanges = new ArrayList<>();
for (String mediaRangeSpec : mediaRangeSpecs)
{
try
{
requestedMediaRanges.add(new MediaRange(mediaRangeSpec));
}
catch (IllegalArgumentException | IllegalStateException ex)
{
log.warn("Couldn't parse part of an AcceptHeader, ignoring it.\n"
+ ex.getMessage(), ex);
}
}
if (requestedMediaRanges.isEmpty())
{
return UNSPECIFIED;
}
Collections.sort(requestedMediaRanges, getMediaRangeComparator());
Collections.reverse(requestedMediaRanges);
if (log.isDebugEnabled())
{
StringBuilder sb = new StringBuilder("Parsed Accept header '" + acceptHeader + "':\n");
for (Iterator<MediaRange> it = requestedMediaRanges.iterator(); it.hasNext(); )
{
MediaRange mr = it.next();
sb.append(mr.getType()).append("/").append(mr.getSubtype());
sb.append(" has a qvalue of ").append(Double.toString(mr.getQvalue()));
sb.append("\n");
}
log.debug(sb.toString());
}
boolean wildcard = false;
boolean html = false;
boolean rdf = false;
boolean n3 = false;
boolean turtle = false;
Iterator<MediaRange> it = requestedMediaRanges.iterator();
MediaRange lookahead = it.hasNext() ? it.next() : null;
while (lookahead != null)
{
double qvalue = lookahead.getQvalue();
String type = lookahead.getType();
String subtype = lookahead.getSubtype();
lookahead = it.hasNext() ? it.next() : null;
if (qvalue <= 0.0)
{
// a quality value of 0.0 means that the defined media range should
// not be sent => skip it.
continue;
}
if ("*".equals(type))
{
wildcard = true;
}
if (("text".equals(type) && "html".equals(subtype))
|| ("application".equals(type) && "xhtml+xml".equals(subtype)))
{
html = true;
}
if ("application".equals(type) && "rdf+xml".equals(subtype))
{
rdf = true;
}
if (("text".equals(type) && "n3".equals(subtype))
|| ("text".equals(type) && "rdf+n3".equals(subtype))
|| ("application".equals(type) && "n3".equals(subtype)))
{
n3 = true;
}
if (("text".equals(type) && "turtle".equals(subtype))
|| ("application".equals(type) && "turtle".equals(subtype))
|| ("application".equals(type) && "x-turtle".equals(subtype))
|| ("application".equals(type) && "rdf+turtle".equals(subtype)))
{
turtle = true;
}
if (lookahead != null
&& qvalue != lookahead.qvalue
&& (wildcard || html || rdf || n3 || turtle))
{
// we've looked at all media ranges with the same precedence
// and found one we can serve
break;
}
}
if (html)
{
return HTML;
}
if (wildcard)
{
return WILDCARD;
}
else if (turtle)
{
return TURTLE;
}
else if (n3)
{
return N3;
}
else if (rdf)
{
return RDFXML;
}
return UNSPECIFIED;
}
/**
* Method to get a comparator to compare media ranges regarding their
* content negotiation precedence. Following RFC 2616, a media range takes
* precedence over another media range if it has a higher quality value.
* If both quality values are equal, the more specific media range should
* be used.
*
* <p>Note: this comparator imposes orderings that are inconsistent with
* equals! Caution should be exercised when using it to order a sorted set
* or a sorted map. Take a look at java.util.Comparator for further
* information.</p>
* @return A comparator that imposes orderings that are inconsistent with equals!
*/
public static Comparator<MediaRange> getMediaRangeComparator() {
return new Comparator<MediaRange>() {
@Override
public int compare(MediaRange mr1, MediaRange mr2) {
if (Double.compare(mr1.qvalue, mr2.getQvalue()) != 0)
{
return Double.compare(mr1.qvalue, mr2.getQvalue());
}
if (mr1.typeIsWildcard() && mr2.typeIsWildcard()) return 0;
if (mr1.typeIsWildcard() && !mr2.typeIsWildcard()) return -1;
if (!mr1.typeIsWildcard() && mr2.typeIsWildcard()) return 1;
if (mr1.subtypeIsWildcard() && mr2.subtypeIsWildcard()) return 0;
if (mr1.subtypeIsWildcard() && !mr2.subtypeIsWildcard()) return -1;
if (!mr1.subtypeIsWildcard() && mr2.subtypeIsWildcard()) return 1;
// if the quality of two media ranges is equal and both don't
// use an asterisk either as type or subtype, they are equal in
// the sense of content negotiation precedence.
return 0;
}
};
}
public static boolean sendRedirect(HttpServletResponse response, String handle,
String extraPathInfo, int serialization, boolean redirectHTML)
throws IOException
{
if (extraPathInfo == null) extraPathInfo = "";
StringBuilder urlBuilder = new StringBuilder();
String lang = null;
switch (serialization)
{
case (Negotiator.UNSPECIFIED):
case (Negotiator.WILDCARD):
{
lang = DEFAULT_LANG;
break;
}
case (Negotiator.HTML):
{
lang = "html";
break;
}
case (Negotiator.RDFXML):
{
lang = "rdf";
break;
}
case (Negotiator.TURTLE):
{
lang = "turtle";
break;
}
case (Negotiator.N3):
{
lang = "n3";
break;
}
default:
{
lang = DEFAULT_LANG;
break;
}
}
assert (lang != null);
if (StringUtils.isEmpty(handle))
{
log.warn("Handle is empty, set it to Site Handle.");
handle = Site.getSiteHandle();
}
// don't redirect if HTML is requested and content negotiation is done
// in a ServletFilter, as the ServletFilter should just let the request
// pass.
if ("html".equals(lang) && !redirectHTML)
{
return false;
}
// as we do content negotiation and we'll redirect the request, we
// should send a Vary header so browsers can adapt their caching strategy
response.setHeader("Vary", "Accept");
// if html is requested we have to forward to the repository's webui.
if ("html".equals(lang))
{
urlBuilder.append((new DSpace()).getConfigurationService()
.getProperty("dspace.url"));
if (!handle.equals(Site.getSiteHandle()))
{
urlBuilder.append("/handle/");
urlBuilder.append(handle).append("/").append(extraPathInfo);
}
String url = urlBuilder.toString();
log.debug("Will forward to '" + url + "'.");
response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.setHeader("Location", url);
response.flushBuffer();
return true;
}
// currently we cannot serve statistics as rdf
if("statistics".equals(extraPathInfo))
{
log.info("Cannot send statistics as RDF yet. => 406 Not Acceptable.");
response.sendError(HttpServletResponse.SC_NOT_ACCEPTABLE);
response.flushBuffer();
return true;
}
// load the URI of the dspace-rdf module.
urlBuilder.append(RDFConfiguration.getDSpaceRDFModuleURI());
if (urlBuilder.length() == 0)
{
log.error("Cannot load URL of dspace-rdf module. "
+ "=> 500 Internal Server Error");
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
response.flushBuffer();
return true;
}
// and build the uri to the DataProviderServlet
urlBuilder.append("/handle/").append(handle);
urlBuilder.append("/").append(lang);
String url = urlBuilder.toString();
log.debug("Will forward to '" + url + "'.");
response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.setHeader("Location", url);
response.flushBuffer();
return true;
}
}
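A short sketch of what negotiate(...) returns for some typical Accept headers (the results follow from the code above):

int a = Negotiator.negotiate("text/turtle;q=0.9, application/rdf+xml;q=0.8");
// a == Negotiator.TURTLE: turtle has the highest quality value
int b = Negotiator.negotiate("text/html, application/xhtml+xml, */*;q=0.8");
// b == Negotiator.HTML: a concrete HTML media range outranks the wildcard
int c = Negotiator.negotiate(null);
// c == Negotiator.UNSPECIFIED: no Accept header was sent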

View File

@@ -0,0 +1,22 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.storage;
/**
* Extends the DOIURIGenerator but uses handles as fallback to DOIs.
* @author pbecker
*/
public class DOIHandleURIGenerator
extends DOIURIGenerator
implements URIGenerator
{
protected final static URIGenerator fallback = new HandleURIGenerator();
}

View File

@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.storage;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.identifier.DOI;
import org.dspace.identifier.IdentifierException;
import java.sql.SQLException;
/**
*
* @author pbecker
*/
public class DOIURIGenerator
implements URIGenerator
{
private static final Logger log = Logger.getLogger(DOIURIGenerator.class);
/*
* Currently (August 31 2014, in preparation of DSpace 5.0) DSpace supports DOIs for items only. This fallback
* will be used to generate a URI whenever no DOI is found.
*/
protected final static URIGenerator fallback = new LocalURIGenerator();
@Override
public String generateIdentifier(Context context, int type, int id, String handle, String[] identifiers) throws SQLException {
if (type != Constants.SITE
&& type != Constants.COMMUNITY
&& type != Constants.COLLECTION
&& type != Constants.ITEM)
{
return null;
}
String doi = null;
for (String identifier : identifiers)
{
try
{
doi = DOI.DOIToExternalForm(identifier);
} catch (IdentifierException ex) {
// identifier is not a DOI: no problem, keep on looking.
}
}
if (doi != null) {
return doi;
} else {
log.info("Didn't find a DOI for " + Constants.typeText[type] + ", id " + Integer.toString(id)
+ ", will use fallback URIGenerator.");
return fallback.generateIdentifier(context, type, id, handle, identifiers);
}
}
@Override
public String generateIdentifier(Context context, DSpaceObject dso)
throws SQLException
{
return generateIdentifier(context, dso.getType(), dso.getID(), dso.getHandle(), dso.getIdentifiers(context));
}
}
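For illustration (the identifiers are hypothetical; DOI.DOIToExternalForm turns doi: identifiers into their resolvable external form):

// identifiers = { "hdl:123456789/42", "doi:10.5072/example" }
//   -> returns the external form of the DOI, e.g. http://dx.doi.org/10.5072/example
// identifiers = { "hdl:123456789/42" } (no DOI registered)
//   -> logs that no DOI was found and delegates to the LocalURIGenerator fallback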

View File

@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.storage;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Site;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class HandleURIGenerator implements URIGenerator {
private static final Logger log = Logger.getLogger(HandleURIGenerator.class);
@Override
public String generateIdentifier(Context context, int type, int id,
String handle, String[] identifiers)
{
if (type == Constants.SITE)
{
return HandleManager.getCanonicalForm(Site.getSiteHandle());
}
if (type == Constants.COMMUNITY
|| type == Constants.COLLECTION
|| type == Constants.ITEM)
{
if (StringUtils.isEmpty(handle))
{
throw new IllegalArgumentException("Handle is null");
}
log.debug("Generated identifier "
+ HandleManager.getCanonicalForm(handle) + " for "
+ Constants.typeText[type] + " " + Integer.toString(id) + ".");
return HandleManager.getCanonicalForm(handle);
}
return null;
}
@Override
public String generateIdentifier(Context context, DSpaceObject dso)
{
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
return null;
}
return generateIdentifier(context, dso.getType(), dso.getID(),
dso.getHandle(), dso.getIdentifiers(context));
}
}
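For illustration (hypothetical handle): HandleManager.getCanonicalForm prefixes the configured canonical handle resolver, so an item with handle 123456789/42 typically gets the URI http://hdl.handle.net/123456789/42.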

View File

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.storage;
import java.sql.SQLException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Site;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class LocalURIGenerator implements URIGenerator {
private static final Logger log = Logger.getLogger(LocalURIGenerator.class);
@Override
public String generateIdentifier(Context context, int type, int id,
String handle, String[] identifiers)
throws SQLException
{
String urlPrefix = RDFConfiguration.getDSpaceRDFModuleURI() + "/resource/";
if (type == Constants.SITE)
{
return urlPrefix + Site.getSiteHandle();
}
if (type == Constants.COMMUNITY
|| type == Constants.COLLECTION
|| type == Constants.ITEM)
{
if (StringUtils.isEmpty(handle))
{
throw new IllegalArgumentException("Handle is null");
}
return urlPrefix + handle;
}
return null;
}
@Override
public String generateIdentifier(Context context, DSpaceObject dso) throws SQLException {
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
return null;
}
return generateIdentifier(context, dso.getType(), dso.getID(), dso.getHandle(), dso.getIdentifiers(context));
}
}
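For illustration, with the rdf module's contextPath set to http://localhost:8080/rdf (see the configuration later in this diff) and a hypothetical handle:

// an item with handle 123456789/42 gets the URI
//   http://localhost:8080/rdf/resource/123456789/42
// the repository itself (SITE) gets
//   http://localhost:8080/rdf/resource/<site handle>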

View File

@@ -0,0 +1,44 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.storage;
import com.hp.hpl.jena.rdf.model.Model;
import java.util.List;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public interface RDFStorage {
/**
* Don't use this method directly, use
* {@link org.dspace.rdf.RDFizer#convert(org.dspace.core.Context,
* org.dspace.content.DSpaceObject) RDFizer.convert(...)} to convert and store DSpaceObjects.
* @param uri Identifier for this DSO
* ({@link org.dspace.rdf.RDFizer#generateIdentifier(org.dspace.core.Context,
* org.dspace.content.DSpaceObject) RDFizer.generateIdentifier(...)}). You can load this model by using this URI.
* @param model The model to store.
* @see org.dspace.rdf.RDFizer
*/
public void store(String uri, Model model);
/**
* Don't use this method directly, use
* {@link org.dspace.rdf.RDFizer#loadModel(String) RDFizer.loadModel(...)} instead.
* @param uri
* @return
*/
public Model load(String uri);
public void delete(String uri);
public void deleteAll();
public List<String> getAllStoredGraphs();
}
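As the javadoc above recommends, client code should normally go through RDFUtil instead of using the storage directly; a minimal sketch (context and dso are assumed to exist):

Model m = RDFUtil.loadModel(RDFUtil.generateIdentifier(context, dso));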

View File

@@ -0,0 +1,227 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.storage;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.NodeFactory;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.DatasetFactory;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
import com.hp.hpl.jena.update.GraphStore;
import com.hp.hpl.jena.update.GraphStoreFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.jena.atlas.web.auth.HttpAuthenticator;
import org.apache.jena.atlas.web.auth.SimpleAuthenticator;
import org.apache.jena.web.DatasetGraphAccessor;
import org.apache.jena.web.DatasetGraphAccessorHTTP;
import org.apache.log4j.Logger;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class RDFStorageImpl
implements RDFStorage
{
private static final Logger log = Logger.getLogger(RDFStorageImpl.class);
private final String GRAPHSTORE_ENDPOINT;
private final String GRAPHSTORE_LOGIN;
private final String GRAPHSTORE_PASSWORD;
private final String SPARQL_ENDPOINT;
private final String SPARQL_LOGIN;
private final String SPARQL_PASSWORD;
private ConfigurationService configurationService;
public RDFStorageImpl()
{
this.configurationService = new DSpace().getConfigurationService();
this.GRAPHSTORE_ENDPOINT = this.configurationService
.getProperty(RDFConfiguration.STORAGE_GRAPHSTORE_ENDPOINT_KEY);
if (StringUtils.isEmpty(this.GRAPHSTORE_ENDPOINT))
{
log.warn("Cannot load Graph Store HTTP Protocol endpoint! Property "
+ RDFConfiguration.STORAGE_GRAPHSTORE_ENDPOINT_KEY + " does not "
+ "exist or is empty.");
throw new RuntimeException("Cannot load Graph Store HTTP Protocol "
+ "endpoint! Property "
+ RDFConfiguration.STORAGE_GRAPHSTORE_ENDPOINT_KEY + " does not "
+ "exist or is empty.");
}
boolean graphstore_use_auth = this.configurationService.getPropertyAsType(
RDFConfiguration.STORAGE_GRAPHSTORE_AUTHENTICATION_KEY, false);
String graphstore_login = this.configurationService.getProperty(
RDFConfiguration.STORAGE_GRAPHSTORE_LOGIN_KEY);
String graphstore_password = this.configurationService.getProperty(
RDFConfiguration.STORAGE_GRAPHSTORE_PASSWORD_KEY);
if (!graphstore_use_auth
|| (graphstore_use_auth && StringUtils.isEmpty(graphstore_login))
|| (graphstore_use_auth && StringUtils.isEmpty(graphstore_password)))
{
this.GRAPHSTORE_LOGIN = null;
this.GRAPHSTORE_PASSWORD = null;
if (graphstore_use_auth)
{
log.warn("The rdf storage is configured to use authentication "
+ "to connect to the Graph Store HTTP Protocol endpoint, "
+ "but no credentials are configured.");
}
} else {
this.GRAPHSTORE_LOGIN = graphstore_login;
this.GRAPHSTORE_PASSWORD = graphstore_password;
}
this.SPARQL_ENDPOINT = RDFConfiguration.getInternalSparqlEndpointAddress();
if (StringUtils.isEmpty(this.SPARQL_ENDPOINT))
{
log.warn("Cannot load internal or public SPARQL endpoint!");
throw new RuntimeException("Cannot load internal or public SPARQL "
+ "endpoint!");
}
boolean sparql_use_auth = this.configurationService.getPropertyAsType(
RDFConfiguration.STORAGE_SPARQL_AUTHENTICATION_KEY, false);
String sparql_login = this.configurationService.getProperty(
RDFConfiguration.STORAGE_SPARQL_LOGIN_KEY);
String sparql_password = this.configurationService.getProperty(
RDFConfiguration.STORAGE_SPARQL_PASSWORD_KEY);
if (!sparql_use_auth
|| (sparql_use_auth && StringUtils.isEmpty(sparql_login))
|| (sparql_use_auth && StringUtils.isEmpty(sparql_password)))
{
this.SPARQL_LOGIN = null;
this.SPARQL_PASSWORD = null;
if (sparql_use_auth)
{
log.warn("The rdf storage is configured to use authentication "
+ "for sparql quries, but no credentials are configured.");
}
} else {
this.SPARQL_LOGIN = sparql_login;
this.SPARQL_PASSWORD = sparql_password;
}
}
@Override
public void store(String uri, Model model)
{
Node graphNode = NodeFactory.createURI(uri);
DatasetGraphAccessor accessor = this.getAccessor();
Dataset ds = DatasetFactory.create(model);
DatasetGraph dsg = ds.asDatasetGraph();
Graph g = dsg.getDefaultGraph();
accessor.httpPut(graphNode, g);
}
@Override
public Model load(String uri)
{
Node graphNode = NodeFactory.createURI(uri);
DatasetGraphAccessor accessor = this.getAccessor();
Graph g = accessor.httpGet(graphNode);
if (g == null || g.isEmpty())
{
return null;
}
GraphStore gs = GraphStoreFactory.create(g);
Dataset ds = gs.toDataset();
Model m = ds.getDefaultModel();
return m;
}
protected DatasetGraphAccessor getAccessor()
{
DatasetGraphAccessor accessor;
if (this.GRAPHSTORE_LOGIN != null)
{
HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
GRAPHSTORE_LOGIN, GRAPHSTORE_PASSWORD.toCharArray());
accessor = new DatasetGraphAccessorHTTP(GRAPHSTORE_ENDPOINT,
httpAuthenticator);
} else {
accessor = new DatasetGraphAccessorHTTP(GRAPHSTORE_ENDPOINT);
}
return accessor;
}
@Override
public void delete(String uri) {
this.getAccessor().httpDelete(NodeFactory.createURI(uri));
}
@Override
public void deleteAll() {
for (String graph : this.getAllStoredGraphs())
{
this.delete(graph);
}
// clean default graph:
this.getAccessor().httpDelete();
}
@Override
public List<String> getAllStoredGraphs() {
String queryString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }";
QueryExecution qexec;
if (this.SPARQL_LOGIN != null)
{
HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
SPARQL_LOGIN, SPARQL_PASSWORD.toCharArray());
qexec = QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT,
queryString, httpAuthenticator);
} else {
qexec = QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT,
queryString);
}
ResultSet rs = qexec.execSelect();
List<String> graphs = Collections.synchronizedList(new ArrayList<String>());
while (rs.hasNext())
{
QuerySolution solution = rs.next();
if (solution.contains("g"))
{
graphs.add(solution.get("g").asResource().getURI());
}
}
qexec.close();
return graphs;
}
}
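A minimal usage sketch, assuming a triple store is running and configured as in the rdf module configuration below (the graph URI is hypothetical):

RDFStorage storage = new RDFStorageImpl();
Model m = storage.load("http://localhost:8080/rdf/resource/123456789/42");
if (m != null)
{
m.write(System.out, "TURTLE");
m.close();
}
for (String graph : storage.getAllStoredGraphs())
{
System.out.println("stored graph: " + graph);
}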

View File

@@ -0,0 +1,56 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.storage;
import java.sql.SQLException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
/**
* Please use
* {@link org.dspace.rdf.RDFUtil#generateIdentifier(Context, DSpaceObject)} and
* {@link org.dspace.rdf.RDFUtil#generateGraphURI(Context, DSpaceObject)} to
* get URIs for RDF data.
* Please note that URIs can be generated for DSpaceObjects of the
* type SITE, COMMUNITY, COLLECTION or ITEM only. Currently dspace-rdf
doesn't support Bundles or Bitstreams as independent entities.
*
{@link org.dspace.rdf.RDFizer} uses a URIGenerator to generate URIs to
identify DSpaceObjects in RDF. You can configure which URIGenerator should be
used. See the DSpace documentation on how to configure the RDFizer.
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
* @see org.dspace.rdf.RDFizer
* @see org.dspace.rdf.RDFUtil
*/
public interface URIGenerator {
/**
* Generate a URI that can be used to identify the specified DSpaceObject in
* RDF data. Please note that URIs can be generated for DSpaceObjects of the
* type SITE, COMMUNITY, COLLECTION or ITEM only. Currently dspace-rdf
doesn't support Bundles or Bitstreams as independent entities. This method
* should work even if the DSpaceObject does not exist anymore.
* @param context DSpace context
* @param type type of the DSpaceObject (see org.dspace.core.Constants)
* @param id ID of the DSpaceObject
* @param handle handle of the DSpaceObject, may be null
* @param identifiers all identifiers attached to the DSpaceObject
* @return May return null, if no URI could be generated.
* @see org.dspace.rdf.RDFUtil#generateIdentifier(Context, DSpaceObject)
*/
public String generateIdentifier(Context context, int type, int id, String handle, String[] identifiers)
throws SQLException;
/**
* Shortcut for {@code generateIdentifier(context, dso.getType(),
* dso.getID(), dso.getHandle(), dso.getIdentifiers(context))}.
*
* @param context
* @param dso
* @return May return null, if no URI could be generated.
*/
public String generateIdentifier(Context context, DSpaceObject dso)
throws SQLException;
}

View File

@@ -69,6 +69,11 @@
<filter-name>registered-only</filter-name>
<filter-class>org.dspace.app.webui.filter.RegisteredOnlyFilter</filter-class>
</filter>
<filter>
<filter-name>rdf-content-negotiation</filter-name>
<filter-class>org.dspace.rdf.negotiation.NegotiationFilter</filter-class>
</filter>
<!-- Filter mappings -->
<filter-mapping>
@@ -116,6 +121,10 @@
<url-pattern>/view-workspaceitem</url-pattern>
</filter-mapping>
<filter-mapping>
<filter-name>rdf-content-negotiation</filter-name>
<url-pattern>/handle/*</url-pattern>
</filter-mapping>
<!-- kernel start listener (from impl), starts up the kernel for standalone webapps -->

75
dspace-rdf/pom.xml Normal file
View File

@@ -0,0 +1,75 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.dspace</groupId>
<artifactId>dspace-rdf</artifactId>
<packaging>war</packaging>
<name>DSpace RDF</name>
<description>Parent project for the RDF API and Webapp</description>
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>5.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<properties>
<!-- This is the path to the root [dspace-src] directory. -->
<root.basedir>${basedir}/..</root.basedir>
</properties>
<build>
<filters>
<!-- Filter using the properties file defined by dspace-parent POM -->
<filter>${filters.file}</filter>
</filters>
</build>
<dependencies>
<!-- Use jena to create, store and load rdf -->
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId>
</dependency>
<!-- Spring 3 dependencies -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,242 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.providing;
import com.hp.hpl.jena.rdf.model.Model;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.SQLException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.rdf.RDFUtil;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class DataProviderServlet extends HttpServlet {
protected static final String DEFAULT_LANG = "TURTLE";
private static final Logger log = Logger.getLogger(DataProviderServlet.class);
/**
* Processes requests for both HTTP
* <code>GET</code> and
* <code>POST</code> methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
// set all incoming encoding to UTF-8
request.setCharacterEncoding("UTF-8");
// we expect either a path containing only the language information
// or a path in the form /handle/<prefix>/<suffix>[/language].
String lang = this.detectLanguage(request);
String cType = this.detectContentType(request, lang);
String pathInfo = request.getPathInfo();
log.debug("lang = " + lang + ", cType = " + cType + " and pathInfo: " + pathInfo);
if (StringUtils.isEmpty(pathInfo) || StringUtils.countMatches(pathInfo, "/") < 2)
{
String dspaceURI =
(new DSpace()).getConfigurationService().getProperty("dspace.url");
this.serveNamedGraph(dspaceURI, lang, cType, response);
return;
}
// remove trailing slash of the path info and split it.
String[] path = request.getPathInfo().substring(1).split("/");
// if the path had fewer than two slashes, repository information was already sent (see above)
assert path.length >= 2;
String handle = path[0] + "/" + path[1];
log.debug("Handle: " + handle + ".");
// As we offer a public sparql endpoint, all information that we store
// in the triplestore is public. It is important to check whether a
// DSpaceObject is readable for an anonymous user before storing it in the
// triplestore, and to remove DSpaceObjects from the triplestore when they
// get revoked or become restricted. As this is done by RDFizer and
// RDFUtil, we do not have to take care of permissions here!
Context context = null;
DSpaceObject dso = null;
try
{
context = new Context(Context.READ_ONLY);
dso = HandleManager.resolveToObject(context, handle);
}
catch (SQLException ex)
{
log.error("SQLException: " + ex.getMessage(), ex);
context.abort();
// probably a problem with the db connection => send Service Unavailable
response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
return;
}
catch (IllegalStateException ex)
{
log.error("Cannot resolve handle " + handle
+ ". IllegalStateException:" + ex.getMessage(), ex);
context.abort();
response.sendError(HttpServletResponse.SC_BAD_REQUEST);
return;
}
if (dso == null)
{
log.info("Cannot resolve handle '" + handle + "' to dso. => 404");
context.abort();
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
String identifier = null;
try
{
identifier = RDFUtil.generateIdentifier(context, dso);
}
catch (SQLException ex)
{
log.error("SQLException: " + ex.getMessage(), ex);
context.abort();
// probably a problem with the db connection => send Service Unavailable
response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
return;
}
if (identifier == null)
{
// cannot generate identifier for dso?!
log.error("Cannot generate identifier for " + dso.getTypeText()
+ " " + dso.getID() + "!");
context.abort();
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
log.debug("Loading and sending named graph " + identifier + ".");
context.abort();
this.serveNamedGraph(identifier, lang, cType, response);
}
protected void serveNamedGraph(String uri, String lang, String contentType,
HttpServletResponse response)
throws ServletException, IOException
{
Model result = null;
result = RDFUtil.loadModel(uri);
if (result == null || result.isEmpty())
{
response.sendError(HttpServletResponse.SC_NOT_FOUND);
if (result != null) result.close();
log.info("Sent 404 Not Found, as the loaded model was null or "
+ "empty (URI: " + uri + ").");
return;
}
response.setContentType(contentType);
PrintWriter out = response.getWriter();
log.debug("Set content-type to " + contentType + ".");
try {
result.write(out, lang);
}
finally
{
result.close();
out.close();
}
}
protected String detectContentType(HttpServletRequest request, String lang)
{
// It is useful to be able to override the content type, to see the
// request result directly in the browser. If a parameter "text" is part
// of the request, we send the result with the content type "text/plain".
if (request.getParameter("text") != null) return "text/plain;charset=UTF-8";
if (lang.equalsIgnoreCase("TURTLE")) return "text/turtle;charset=UTF-8";
if (lang.equalsIgnoreCase("n3")) return "text/n3;charset=UTF-8";
if (lang.equalsIgnoreCase("RDF/XML")) return "application/rdf+xml;charset=UTF-8";
if (lang.equalsIgnoreCase("N-TRIPLE")) return "application/n-triples;charset=UTF-8";
throw new IllegalStateException("Cannot set content type for unknown language.");
}
protected String detectLanguage(HttpServletRequest request)
{
String pathInfo = request.getPathInfo();
if (StringUtils.isEmpty(pathInfo)) return DEFAULT_LANG;
String[] path = request.getPathInfo().split("/");
String lang = path[(path.length - 1)];
if (StringUtils.endsWithIgnoreCase(lang, "ttl")) return "TURTLE";
if (StringUtils.equalsIgnoreCase(lang, "n3")) return "N3";
if (StringUtils.equalsIgnoreCase(lang, "rdf")
|| StringUtils.equalsIgnoreCase(lang, "xml"))
{
return "RDF/XML";
}
if (StringUtils.endsWithIgnoreCase(lang, "nt")) return "N-TRIPLE";
return DEFAULT_LANG;
}
/**
* Handles the HTTP
* <code>GET</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Handles the HTTP
* <code>POST</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Returns a short description of the servlet.
*
* @return a String containing servlet description
*/
public String getServletInfo() {
return "Serves repository content as rdf serialization (RDF/XML, Turtle, N-Triples and N3).";
}
}
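For illustration, how the servlet above maps request paths to serializations (hypothetical handle; the servlet is mapped to /handle/* in the web.xml below):

// GET /rdf/handle/123456789/42           -> Turtle (default), text/turtle
// GET /rdf/handle/123456789/42/rdf       -> RDF/XML, application/rdf+xml
// GET /rdf/handle/123456789/42/n3        -> N3, text/n3
// GET /rdf/handle/123456789/42/nt        -> N-Triples, application/n-triples
// GET /rdf/handle/123456789/42/ttl?text  -> Turtle, served as text/plain for debugging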

View File

@@ -0,0 +1,141 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.providing;
import java.io.IOException;
import java.sql.SQLException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.rdf.negotiation.Negotiator;
import org.dspace.utils.DSpace;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class LocalURIRedirectionServlet extends HttpServlet
{
public static final String ACCEPT_HEADER_NAME = "Accept";
private final static Logger log = Logger.getLogger(LocalURIRedirectionServlet.class);
/**
* Processes requests for both HTTP
* <code>GET</code> and
* <code>POST</code> methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
// we expect a path in the form /resource/<prefix>/<suffix>.
String pathInfo = request.getPathInfo();
log.debug("Pathinfo: " + pathInfo);
if (StringUtils.isEmpty(pathInfo) || StringUtils.countMatches(pathInfo, "/") < 2)
{
log.debug("Path does not contain the expected number of slashes.");
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
// remove trailing slash of the path info and split it.
String[] path = request.getPathInfo().substring(1).split("/");
String handle = path[0] + "/" + path[1];
String dspaceURL =
(new DSpace()).getConfigurationService().getProperty("dspace.url");
// Prepare content negotiation
int requestedMimeType = Negotiator.negotiate(request.getHeader(ACCEPT_HEADER_NAME));
Context context = null;
DSpaceObject dso = null;
try
{
context = new Context(Context.READ_ONLY);
dso = HandleManager.resolveToObject(context, handle);
}
catch (SQLException ex)
{
log.error("SQLException: " + ex.getMessage(), ex);
context.abort();
// probably a problem with the db connection => send Service Unavailable
response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
return;
}
catch (IllegalStateException ex)
{
log.error("Cannot resolve handle " + handle
+ ". IllegalStateException:" + ex.getMessage(), ex);
context.abort();
response.sendError(HttpServletResponse.SC_BAD_REQUEST);
return;
}
if (dso == null)
{
log.info("Cannot resolve handle '" + handle + "' to dso. => 404");
context.abort();
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
// close the context and send the redirect.
context.abort();
Negotiator.sendRedirect(response, handle, "", requestedMimeType, true);
}
/**
* Handles the HTTP
* <code>GET</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Handles the HTTP
* <code>POST</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Returns a short description of the servlet.
*
* @return a String containing servlet description
*/
public String getServletInfo() {
return "Ensures that URIs used in RDF can be dereferenced.";
}
}
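For illustration, dereferencing a local RDF URI (hypothetical handle):

// GET /rdf/resource/123456789/42 with "Accept: application/rdf+xml"
//   -> 303 See Other, Location: http://localhost:8080/rdf/handle/123456789/42/rdf
// GET /rdf/resource/123456789/42 with "Accept: text/html"
//   -> 303 See Other to the item's page in the webui (${dspace.url}/handle/123456789/42/)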

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.utils;
import org.dspace.app.util.AbstractDSpaceWebapp;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class DSpaceWebapp
extends AbstractDSpaceWebapp
{
public DSpaceWebapp()
{
super("RDF");
}
@Override
public boolean isUI()
{
return false;
}
}

View File

@@ -0,0 +1,71 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
The contents of this file are subject to the license and copyright
detailed in the LICENSE and NOTICE files at the root of the source
tree and available online at
http://www.dspace.org/license/
-->
<web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://java.sun.com/xml/ns/javaee"
xmlns:web="http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
id="WebApp_ID" version="2.5">
<display-name>RDF Data Provider</display-name>
<context-param>
<description>
The location of the main DSpace configuration file
</description>
<param-name>dspace-config</param-name>
<param-value>${dspace.dir}/config/dspace.cfg</param-value>
</context-param>
<context-param>
<description>
The location of the main DSpace configuration file
</description>
<param-name>dspace.dir</param-name>
<param-value>${dspace.dir}</param-value>
</context-param>
<filter>
<filter-name>dspace.request</filter-name>
<filter-class>org.dspace.utils.servlet.DSpaceWebappServletFilter</filter-class>
</filter>
<filter-mapping>
<filter-name>dspace.request</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<listener>
<listener-class>org.dspace.app.util.DSpaceContextListener</listener-class>
</listener>
<listener>
<listener-class>org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener</listener-class>
</listener>
<servlet>
<servlet-name>rdf-serialization</servlet-name>
<servlet-class>org.dspace.rdf.providing.DataProviderServlet</servlet-class>
</servlet>
<servlet>
<servlet-name>local-uri-redirection</servlet-name>
<servlet-class>org.dspace.rdf.providing.LocalURIRedirectionServlet</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>rdf-serialization</servlet-name>
<url-pattern>/handle/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>local-uri-redirection</servlet-name>
<url-pattern>/resource/*</url-pattern>
</servlet-mapping>
</web-app>

View File

@@ -110,6 +110,15 @@
</init-param>
</filter>
<!--
Declare a filter to do content negotiation in combination with our
Linked Data support.
-->
<filter>
<filter-name>rdf-content-negotiation</filter-name>
<filter-class>org.dspace.rdf.negotiation.NegotiationFilter</filter-class>
</filter>
<!-- Filter mappings ================================================ -->
@@ -136,6 +145,11 @@
<filter-name>SetCharacterEncoding</filter-name>
<servlet-name>spring</servlet-name>
</filter-mapping>
<filter-mapping>
<filter-name>rdf-content-negotiation</filter-name>
<url-pattern>/handle/*</url-pattern>
</filter-mapping>
<!--
- Use the Cocoon debug filter together with the Cocoon demo webapp

View File

@@ -709,8 +709,9 @@ event.dispatcher.default.class = org.dspace.event.BasicDispatcher
#
# event.dispatcher.default.consumers = versioning, browse, discovery, eperson, harvester
#
# uncomment event.consumer.doi.class and event.consumer.doi.filters below and add doi here
# if you want to send metadata updates to your doi registration agency.
# Add doi here if you are using org.dspace.identifier.DOIIdentifierProvider to generate DOIs.
# Adding doi here makes DSpace send metadata updates to your doi registration agency.
# Add rdf here, if you are using dspace-rdf to export your repository content as RDF.
event.dispatcher.default.consumers = versioning, discovery, eperson, harvester
# The noindex dispatcher will not create search or browse indexes (useful for batch item imports)
@@ -738,8 +739,12 @@ event.consumer.harvester.class = org.dspace.harvest.HarvestConsumer
event.consumer.harvester.filters = Item+Delete
# consumer to update metadata of DOIs
#event.consumer.doi.class = org.dspace.identifier.doi.DOIConsumer
#event.consumer.doi.filters = Item+Modify_Metadata
event.consumer.doi.class = org.dspace.identifier.doi.DOIConsumer
event.consumer.doi.filters = Item+Modify_Metadata
# consumer to update the triplestore of dspace-rdf
event.consumer.rdf.class = org.dspace.rdf.RDFConsumer
event.consumer.rdf.filters = All+All
# test consumer for debugging and monitoring
#event.consumer.test.class = org.dspace.event.TestConsumer

View File

@@ -223,6 +223,14 @@
</step>
</command>
<command>
<name>rdfizer</name>
<description>The RDFizer manages the attached triple store if dspace-rdf is enabled</description>
<step>
<class>org.dspace.rdf.RDFizer</class>
</step>
</command>
<command>
<name>registry-loader</name>
<description>Load entries into a registry</description>

View File

@@ -0,0 +1,86 @@
#############################
### GENERAL CONFIGURATION ###
#############################
# Address of the public SPARQL endpoint
public.sparql.endpoint = http://localhost/fuseki/dspace/sparql
# Defines the URIGenerator to be used.
# This defines what the identifiers used in the converted RDF data will
# look like.
URIGenerator = org.dspace.rdf.storage.LocalURIGenerator
# This property sets the class that manages the whole conversion process.
converter = org.dspace.rdf.conversion.RDFConverterImpl
# The following list contains all plugins to use for the conversion.
# All listed plugins will be used; disabling or enabling a plugin will
# strongly influence the result of the conversion.
converter.plugins = org.dspace.rdf.conversion.StaticDSOConverterPlugin, \
org.dspace.rdf.conversion.MetadataConverterPlugin, \
org.dspace.rdf.conversion.SimpleDSORelationsConverterPlugin
converter.DSOtypes = SITE, COMMUNITY, COLLECTION, ITEM
# Configure which class to use to store the converted data.
# Please see the configuration section below regarding the storage class
# you configured here.
storage = org.dspace.rdf.storage.RDFStorageImpl
# Address of the endpoint for the SPARQL 1.1 Graph Store HTTP Protocol.
# This address is used to store data in the triple store.
storage.graphstore.endpoint = http://localhost:3030/dspace/data
# If the SPARQL 1.1 Graph Store HTTP Protocol endpoint requires
# authentication, please set this property to yes.
storage.graphstore.authentication = no
# please set the authentication credentials
#storage.graphstore.login = dspace
#storage.graphstore.password = ecapsd
# Address DSpace should use to query the SPARQL endpoint, e.g. the
# RDFStorageImpl uses this address to determine a list of all stored
# graphs. The SPARQL endpoint can be read-only; all commands that change
# data will be performed using the SPARQL 1.1 Graph Store HTTP Protocol.
# If this is empty the property "endpoint" will be used instead.
storage.sparql.endpoint = http://localhost:3030/dspace/sparql
# If the internal SPARQL endpoint requires authentication, please set
# this property to yes.
storage.sparql.authentication = no
# please set the authentication credentials
#storage.sparql.login = dspace
#storage.sparql.password = ecapsd
# Set the URL of the dspace-rdf module here. This is necessary to use
# content negotiation in dspace-jspui.
contextPath = ${dspace.baseUrl}/rdf
# Configure if content negotiation should be enabled
contentNegotiation.enable = true
#############################
### PLUGINS CONFIGURATION ###
#############################
## StaticDSOConverterPlugin ##
# The following properties define files to read static data from.
constant.data.GENERAL = ${dspace.dir}/config/modules/rdf/constant-data-general.ttl
constant.data.COLLECTION = ${dspace.dir}/config/modules/rdf/constant-data-collection.ttl
constant.data.COMMUNITY = ${dspace.dir}/config/modules/rdf/constant-data-community.ttl
constant.data.ITEM = ${dspace.dir}/config/modules/rdf/constant-data-item.ttl
constant.data.SITE = ${dspace.dir}/config/modules/rdf/constant-data-site.ttl
## MetadataConverterPlugin ##
metadata.mappings = ${dspace.dir}/config/modules/rdf/metadata-rdf-mapping.ttl
metadata.schema = file://${dspace.dir}/config/modules/rdf/metadata-rdf-schema.ttl
metadata.prefixes = ${dspace.dir}/config/modules/rdf/metadata-prefixes.ttl
## SimpleDSORelationsConverterPlugin ##
simplerelations.prefixes = ${dspace.dir}/config/modules/rdf/simple-relations-prefixes.ttl
simplerelations.site2community = http://purl.org/dc/terms/hasPart, http://digital-repositories.org/ontologies/dspace/0.1.0#hasCommunity
simplerelations.community2site = http://purl.org/dc/terms/isPartOf, http://digital-repositories.org/ontologies/dspace/0.1.0#isPartOfRepository
simplerelations.community2subcommunity = http://purl.org/dc/terms/hasPart, http://digital-repositories.org/ontologies/dspace/0.1.0#hasSubcommunity
simplerelations.subcommunity2community = http://purl.org/dc/terms/isPartOf, http://digital-repositories.org/ontologies/dspace/0.1.0#isSubcommunityOf
simplerelations.community2collection = http://purl.org/dc/terms/hasPart, \
http://digital-repositories.org/ontologies/dspace/0.1.0#hasCollection
simplerelations.collection2community = http://purl.org/dc/terms/isPartOf, \
http://digital-repositories.org/ontologies/dspace/0.1.0#isPartOfCommunity
simplerelations.collection2item = http://purl.org/dc/terms/hasPart,\
http://digital-repositories.org/ontologies/dspace/0.1.0#hasItem
simplerelations.item2collection = http://purl.org/dc/terms/isPartOf,\
http://digital-repositories.org/ontologies/dspace/0.1.0#isPartOfCollection
simplerelations.item2bitstream = http://purl.org/dc/terms/hasPart,\
http://digital-repositories.org/ontologies/dspace/0.1.0#hasBitstream

0
dspace/config/modules/rdf/.gitignore vendored Normal file
View File

View File

@@ -0,0 +1,3 @@
@prefix bibo: <http://purl.org/ontology/bibo/> .
<> a bibo:Collection .

View File

@@ -0,0 +1,3 @@
@prefix bibo: <http://purl.org/ontology/bibo/> .
<> a bibo:Collection .

View File

@@ -0,0 +1,7 @@
@prefix void: <http://rdfs.org/ns/void#> .
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
<> foaf:homepage <http://localhost:8080/jspui> ;
void:sparqlEndpoint <http://localhost/fuseki/dspace/sparql> ;
.

View File

@@ -0,0 +1,58 @@
@prefix fuseki: <http://jena.apache.org/fuseki#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix tdb: <http://jena.hpl.hp.com/2008/tdb#> .
@prefix ja: <http://jena.hpl.hp.com/2005/11/Assembler#> .
@prefix : <#> .
###########################
#### Server description ###
###########################
[] rdf:type fuseki:Server ;
# You can set general query timeouts on a server-wide basis:
# Format 1: "1000" -- 1 second timeout
# Format 2: "10000,60000" -- 10s timeout to first result, then 60s timeout for the rest of the query.
# See java doc for ARQ.queryTimeout
# ja:context [ ja:cxtName "arq:queryTimeout" ; ja:cxtValue "10000" ] ;
# Load custom code (rarely needed)
# ja:loadClass "your.code.Class" ;
# Services available. Only explicitly listed services are configured.
# If there is a service description not linked from this list, it is ignored.
fuseki:services (
<#service1>
) .
################################
### Assembler initialization ###
################################
# Declaration of additional assembler items.
[] ja:loadClass "com.hp.hpl.jena.tdb.TDB" .
# TDB
tdb:DatasetTDB rdfs:subClassOf ja:RDFDataset .
tdb:GraphTDB rdfs:subClassOf ja:Model .
#################
### Service 1 ###
#################
<#service1> rdf:type fuseki:Service ;
fuseki:name "dspace" ; # http://host:port/dspace
fuseki:serviceQuery "sparql" ; # SPARQL query service
fuseki:serviceReadWriteGraphStore "data" ; # SPARQL Graph store protocol (read and write)
# A separate read-only graph store endpoint:
fuseki:serviceReadGraphStore "get" ; # SPARQL Graph store protocol (read only)
# Other endpoints you may add:
# fuseki:serviceQuery "query" ; # SPARQL query service
# fuseki:serviceUpdate "update" ; # SPARQL update service
# fuseki:serviceUpload "upload" ; # Non-SPARQL upload service
fuseki:dataset <#dspace-triplestore> ;
.
<#dspace-triplestore> rdf:type tdb:DatasetTDB ;
tdb:location "${dspace.dir}/triplestore" ;
# Make the default graph be the union of all named graphs.
tdb:unionDefaultGraph true ;
.
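With this assembler configuration, Fuseki exposes the TDB store under /dspace: a read-only SPARQL query service for the public and a read-write graph store for DSpace itself. A sketch of querying the read-only endpoint from Java with ARQ; the endpoint URL assumes the local default used in the site data above, and the query is an invented example:

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;

public class SparqlEndpointSketch {
    public static void main(String[] args) {
        // Read-only SPARQL service as wired up above; adjust host/port to your Fuseki.
        String endpoint = "http://localhost/fuseki/dspace/sparql";
        // Invented example query: list ten titles from the triple store.
        String query = "SELECT ?s ?title WHERE { ?s <http://purl.org/dc/terms/title> ?title } LIMIT 10";
        QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, query);
        try {
            ResultSet results = qexec.execSelect();
            while (results.hasNext()) {
                QuerySolution solution = results.next();
                System.out.println(solution.get("s") + " -> " + solution.get("title"));
            }
        } finally {
            qexec.close();
        }
    }
}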


@@ -0,0 +1,4 @@
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix bibo: <http://purl.org/ontology/bibo/> .
@prefix dc: <http://purl.org/dc/elements/1.1/> .
@prefix dcterms: <http://purl.org/dc/terms/> .


@@ -0,0 +1,428 @@
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix dc: <http://purl.org/dc/elements/1.1/> .
@prefix dcterms: <http://purl.org/dc/terms/> .
@prefix bibo: <http://purl.org/ontology/bibo/> .
@prefix dm: <http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0#> .
@prefix : <#> .
:title
dm:metadataName "dc.title" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:title ;
dm:object dm:DSpaceValue ;
] ;
.
:titleAlternative
dm:metadataName "dc.title.alternative" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:alternative ;
dm:object dm:DSpaceValue ;
] ;
.
:author
dm:metadataName "dc.contributor.author" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:creator ;
dm:object dm:DSpaceValue ;
] ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:contributor ;
dm:object dm:DSpaceValue ;
] ;
.
:contributor
dm:metadataName "dc.contributor" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:contributor ;
dm:object dm:DSpaceValue ;
] ;
.
:available
dm:metadataName "dc.date.available" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:available ;
dm:object [
a dm:LiteralGenerator ;
dm:pattern "$DSpaceValue" ;
dm:literalType xsd:dateTime ;
] ;
] ;
.
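dm:literalType instructs the converter to emit a typed literal. A sketch of the typed literal this mapping would presumably yield for one dc.date.available value (the timestamp is an invented example):

import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class TypedLiteralSketch {
    public static void main(String[] args) {
        Model model = ModelFactory.createDefaultModel();
        // Invented dc.date.available value, typed as xsd:dateTime per dm:literalType.
        Literal available = model.createTypedLiteral("2014-04-28T12:45:30Z", XSDDatatype.XSDdateTime);
        System.out.println(available.getLexicalForm() + "^^" + available.getDatatypeURI());
    }
}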
:dateCopyrighted
dm:metadataName "dc.date.copyright" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:dateCopyrighted ;
dm:object [
a dm:LiteralGenerator ;
dm:pattern "$DSpaceValue" ;
dm:literalType xsd:dateTime ;
] ;
] ;
.
:created
dm:metadataName "dc.date.created" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:created ;
dm:object [
a dm:LiteralGenerator ;
dm:pattern "$DSpaceValue" ;
dm:literalType xsd:dateTime ;
] ;
];
.
:issued
dm:metadataName "dc.date.issued" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:issued ;
dm:object dm:DSpaceValue ; # don't use xsd:date as we cannot ensure the format.
];
.
:dateSubmitted
dm:metadataName "dc.date.submitted" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:dateSubmitted ;
dm:object [
a dm:LiteralGenerator ;
dm:pattern "$DSpaceValue" ;
dm:literalType xsd:dateTime ;
] ;
];
.
:modified
dm:metadataName "dc.date.updated" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:modified ;
dm:object [
a dm:LiteralGenerator ;
dm:pattern "$DSpaceValue" ;
dm:literalType xsd:dateTime ;
] ;
];
.
:date
dm:metadataName "dc.date" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:date ;
dm:object [
a dm:LiteralGenerator ;
dm:pattern "$DSpaceValue" ;
dm:literalType xsd:dateTime ;
] ;
];
.
:abstract
dm:metadataName "dc.description.abstract" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:abstract ;
dm:object [
a dm:LiteralGenerator ;
dm:pattern "$DSpaceValue" ;
dm:dspaceLanguageTag "true"^^xsd:boolean ;
] ;
];
.
:format
dm:metadataName "dc.format" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:format ;
dm:object dm:DSpaceValue ;
];
.
:doi
dm:metadataName "dc.identifier.uri" ;
dm:condition "^http://dx.doi.org/.+" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate bibo:doi ;
dm:object [
a dm:LiteralGenerator ;
dm:modifier [
dm:matcher "^http://dx.doi.org/(.*)$" ;
dm:replacement "doi:$1";
];
dm:pattern "$DSpaceValue";
];
];
.
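dm:condition, dm:matcher and dm:replacement behave as plain Java regular expressions (the mapping schema below describes the modifier as a Java regex). A sketch of the rewrite the :doi mapping performs, using an invented DOI:

public class DoiModifierSketch {
    public static void main(String[] args) {
        String value = "http://dx.doi.org/10.1000/182"; // invented dc.identifier.uri value
        // dm:condition -- the mapping only fires when the value matches.
        if (value.matches("^http://dx.doi.org/.+")) {
            // dm:matcher / dm:replacement -- rewrite the value before it fills dm:pattern.
            String literal = value.replaceAll("^http://dx.doi.org/(.*)$", "doi:$1");
            System.out.println(literal); // prints "doi:10.1000/182"
        }
    }
}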
:handle
dm:metadataName "dc.identifier.uri" ;
dm:condition "^http://hdl.handle.net/.*$" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate bibo:handle ;
dm:object [
a dm:LiteralGenerator ;
dm:modifier [
dm:matcher "^http://hdl.handle.net/(.*)$" ;
dm:replacement "hdl:$1";
];
];
];
.
:localHandleURI
dm:metadataName "dc.identifier.uri" ;
dm:condition "^http://localhost:8080/jspui/handle/" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate bibo:handle ;
dm:object [
a dm:LiteralGenerator ;
dm:modifier [
dm:matcher "^http://localhost:8080/jspui/handle/(.*)$" ;
dm:replacement "hdl:$1";
];
dm:pattern "$DSpaceValue";
];
];
.
:citation
dm:metadataName "dc.identifier.citation" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:bibliographicCitation ;
dm:object dm:DSpaceValue ;
];
.
:isbn
dm:metadataName "dc.identifier.isbn" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate bibo:isbn ;
dm:object dm:DSpaceValue ;
];
.
:issn
dm:metadataName "dc.identifier.issn" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate bibo:issn ;
dm:object dm:DSpaceValue ;
];
.
:sici
dm:metadataName "dc.identifier.sici" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate bibo:sici ;
dm:object dm:DSpaceValue ;
];
.
:uri
dm:metadataName "dc.identifier.uri" ;
dm:condition "^((?!http://dx.doi.org/)(?!http://hdl.handle.net/)(?!http://localhost:8080/jspui/handle/)).*" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate bibo:uri ;
dm:object [
a dm:ResourceGenerator ;
dm:pattern "$DSpaceValue" ;
] ;
];
.
:languageISO
dm:metadataName "dc.language.iso" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:language ;
dm:object [
a dm:LiteralGenerator ;
dm:modifier [
dm:matcher "^(..)_(.*)$" ;
dm:replacement "$1-$2";
];
dm:pattern "$DSpaceValue";
];
];
.
:language
dm:metadataName "dc.language" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:language ;
dm:object dm:DSpaceValue ;
];
.
:publisher
dm:metadataName "dc.publisher" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:publisher ;
dm:object dm:DSpaceValue ;
];
.
:hasPart
dm:metadataName "dc.relation.haspart";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:hasPart ;
dm:object dm:DSpaceValue ;
];
.
:hasVersion
dm:metadataName "dc.relation.hasversion" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:hasVersion ;
dm:object dm:DSpaceValue ;
];
.
:references
dm:metadataName "dc.relation.isbasedon" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:references ;
dm:object dm:DSpaceValue ;
];
.
:isFormatOf
dm:metadataName "dc.relation.isformatof" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:isFormatOf ;
dm:object dm:DSpaceValue ;
];
.
:isPartOf
dm:metadataName "dc.relation.ispartof" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:isPartOf ;
dm:object dm:DSpaceValue ;
];
.
:isReferencedBy
dm:metadataName "dc.relation.isreferencedby";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:isReferencedBy ;
dm:object dm:DSpaceValue ;
];
.
:isReplacedBy
dm:metadataName "dc.relation.isreplacedby";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:isReplacedBy ;
dm:object dm:DSpaceValue ;
];
.
:isVersionOf
dm:metadataName "dc.relation.isversionof";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:isVersionOf ;
dm:object dm:DSpaceValue ;
];
.
:replaces
dm:metadataName "dc.relation.replaces";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:replaces ;
dm:object dm:DSpaceValue ;
];
.
:requires
dm:metadataName "dc.relation.requires";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:requires ;
dm:object dm:DSpaceValue ;
];
.
:relationURI
dm:metadataName "dc.relation.uri";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:relation ;
dm:object [
a dm:ResourceGenerator ;
dm:pattern "$DSpaceValue" ;
];
];
.
:relation
dm:metadataName "dc.relation.hasPart";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:relation ;
dm:object dm:DSpaceValue ;
];
.
:rightsURI
dm:metadataName "dc.rights.uri";
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dcterms:rights ;
dm:object [
a dm:ResourceGenerator ;
dm:pattern "$DSpaceValue" ;
];
];
.
:rights
dm:metadataName "dc.rights" ;
dm:creates [
dm:subject dm:DSpaceObjectIRI ;
dm:predicate dc:rights ;
dm:object dm:DSpaceValue ;
];
.
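Because the mapping file is ordinary Turtle, it can be sanity-checked with Jena directly. A sketch that loads the file and prints every metadata field it maps; the file path is a placeholder for the local configuration directory:

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.ResIterator;

public class MappingInspectorSketch {
    static final String DM =
            "http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0#";

    public static void main(String[] args) {
        Model model = ModelFactory.createDefaultModel();
        // Placeholder path -- point at [dspace]/config/modules/rdf/metadata-rdf-mapping.ttl.
        model.read("file:///path/to/metadata-rdf-mapping.ttl", "TURTLE");
        Property metadataName = model.createProperty(DM, "metadataName");
        ResIterator mappings = model.listResourcesWithProperty(metadataName);
        while (mappings.hasNext()) {
            System.out.println(mappings.nextResource().getProperty(metadataName).getString());
        }
    }
}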


@@ -0,0 +1,176 @@
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix dc: <http://purl.org/dc/elements/1.1/> .
@prefix : <http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0#> .
<http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0>
rdfs:comment "Vocabulary for describing mappings of DSpace metadata to rdf." ;
dc:contributor "Pascal-Nicolas Becker" ;
dc:title "DSpace Metadata RDF Mapping Spec" ;
dc:description "Vocabulary for describing mappings of DSpace metadata to RDF. This vocabulary is used to configure DSpace how to convert stored metadata into RDF." ;
dc:date "2014-04-18" ;
.
:DSpaceMetadataRDFMapping
a rdfs:Class ;
rdfs:label "DSpace Metadata RDF Mapping" ;
rdfs:comment "Represents the mapping of a DSpace metadata value to an RDF equivalent." ;
.
:Result
a rdfs:Class ;
rdfs:subClassOf rdf:Statement ;
rdfs:label "DSpace Metadata RDF Mapping Result" ;
rdfs:comment "A reified statement that describes the result of the DSpaceMetadataRDFMapping." ;
.
:ValueProcessor
a rdfs:Class ;
rdfs:label "DSpace metadata value processor" ;
rdfs:comment "Processes a metadata value into an RDF value or an IRI." ;
.
:ValueModifier
a rdfs:Class ;
rdfs:label "Java Regex" ;
rdfs:comment "A regular expression to be used with java, composed of a matching and a replaying expression." ;
.
:LiteralGenerator
a rdfs:Class ;
rdfs:subClassOf :ValueProcessor ;
rdfs:label "DSpace metadata value literal generator" ;
rdfs:comment "Generates a literal depending on a DSpace metadata value." ;
.
:ResourceGenerator
a rdfs:Class ;
rdfs:subClassOf :ValueProcessor ;
rdfs:label "DSpace metadata value resource generator" ;
rdfs:comment "Generates an IRI used for a rdfs:Resource depending on the converted DSpace Object and one of its metadata values." ;
.
:DSpaceObjectIRI
a rdfs:Resource ;
rdf:type :ResourceGenerator ;
rdf:type :ValueProcessor ;
rdfs:label "DSpace Object IRI" ;
rdfs:comment "Placeholder for the IRI of the DSpace Object that gets converted." ;
.
:DSpaceValue
a rdfs:Resource ;
rdf:type :LiteralGenerator ;
rdf:type :ValueProcessor ;
rdfs:label "DSpace Metadata Value" ;
rdfs:comment "Shortcut to generate a Literal containing an unchanged metadata value." ;
.
:creates
a rdf:Property ;
rdfs:label "Result" ;
rdfs:comment "Specifies the RDF to generate for a specified matadata." ;
rdfs:domain :DSpaceMetadataRDFMapping ;
rdfs:range :Result ;
.
:subject
a rdf:Property ;
rdfs:subPropertyOf rdf:subject ;
rdfs:label "Subject" ;
rdfs:comment "The subject of a DSpace metadata RDF mapping result." ;
rdfs:domain :Result ;
.
:predicate
a rdf:Property ;
rdfs:subPropertyOf rdf:predicate ;
rdfs:label "Subject" ;
rdfs:comment "The predicate of a DSpace metadata RDF mapping result." ;
rdfs:domain :Result ;
.
:object
a rdf:Property ;
rdfs:subPropertyOf rdf:object ;
rdfs:label "Object" ;
rdfs:comment "The object of a DSpace metadata RDF mapping result." ;
rdfs:domain :Result ;
.
:metadataName
a rdf:Property ;
rdfs:label "Metadata name" ;
rdfs:comment "The name of the metadata to convert (g.e. dc.title)." ;
rdfs:domain :DSpaceMetadataRDFMapping ;
rdfs:range rdfs:Literal ;
.
:condition
a rdf:Property ;
rdfs:label "Regex" ;
rdfs:comment "A regex that the metadata value has to fulfill if the mapping should become active." ;
rdfs:domain :DSpaceMetadataRDFMapping ;
rdfs:range rdfs:Literal ;
.
:modifier
a rdf:Property ;
rdfs:label "Value modifier" ;
rdfs:comment "Information how the metadata value should be modified before it is inserted in the pattern." ;
rdfs:domain :ValueProcessor ;
rdfs:range :ValueModifier ;
.
:matcher
a rdf:Property ;
rdfs:label "matching regex" ;
rdfs:comment "A regex that matches those subsequences of a metadata value, that should be replaced." ;
rdfs:domain rdfs:ValueModifier ;
rdfs:range rdfs:Literal;
.
:replacement
a rdf:Property ;
rdfs:label "replacing regex" ;
rdfs:comment "A regex that replaces previously matched subsequences of a metadata value." ;
rdfs:domain :ValueModifier ;
rdfs:range rdfs:Literal ;
.
:pattern
a rdf:Property ;
rdfs:label "Pattern" ;
rdfs:comment "A pattern that contains $DSpaceValue as placeholder for the metadata value." ;
rdfs:domain :ValueProcessor ;
rdfs:range rdfs:Literal ;
.
:literalType
a rdf:Property ;
rdfs:label "Literal Type" ;
rdfs:comment "Defines the datatype a generated literal gets." ;
rdfs:domain :LiteralGenerator ;
rdfs:range rdfs:Datatype ;
.
:literalLanguage
a rdf:Property ;
rdfs:label "Language" ;
rdfs:comment "Defines the language a literal uses. Maybe overridden by #dspaceLanguageTag." ;
rdfs:domain :LiteralGenerator ;
rdfs:range rdfs:Literal ;
.
:dspaceLanguageTag
a rdf:Property ;
rdfs:label "DSpace Language Tag";
rdfs:comment "Defines to use the language tag of a DSpace metadata value.";
rdfs:domain :LiteralGenerator ;
rdfs:range xsd:boolean ;
.
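A custom mapping is just more Turtle built from this vocabulary. A sketch that assembles one programmatically and prints it; the dc.subject-to-dcterms:subject mapping is an invented example, not part of the shipped configuration:

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;

public class MappingAuthoringSketch {
    static final String DM =
            "http://digital-repositories.org/ontologies/dspace-metadata-mapping/0.2.0#";
    static final String DCTERMS = "http://purl.org/dc/terms/";

    public static void main(String[] args) {
        Model m = ModelFactory.createDefaultModel();
        m.setNsPrefix("dm", DM);
        // The reified result: DSpaceObjectIRI dcterms:subject DSpaceValue.
        Resource result = m.createResource()
                .addProperty(m.createProperty(DM, "subject"), m.createResource(DM + "DSpaceObjectIRI"))
                .addProperty(m.createProperty(DM, "predicate"), m.createResource(DCTERMS + "subject"))
                .addProperty(m.createProperty(DM, "object"), m.createResource(DM + "DSpaceValue"));
        // The mapping itself: fire on dc.subject and create the result above.
        m.createResource()
                .addProperty(m.createProperty(DM, "metadataName"), "dc.subject")
                .addProperty(m.createProperty(DM, "creates"), result);
        m.write(System.out, "TURTLE");
    }
}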


@@ -0,0 +1,2 @@
@prefix dspace: <http://digital-repositories.org/ontologies/dspace/0.1.0#> .
@prefix dcterms: <http://purl.org/dc/terms/> .


@@ -70,6 +70,17 @@
<module>jspui</module>
</modules>
</profile>
<profile>
<id>dspace-rdf</id>
<activation>
<file>
<exists>rdf/pom.xml</exists>
</file>
</activation>
<modules>
<module>rdf</module>
</modules>
</profile>
<profile>
<id>dspace-rest</id>
<activation>


@@ -0,0 +1,88 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.dspace.modules</groupId>
<artifactId>rdf</artifactId>
<packaging>war</packaging>
<name>DSpace RDF :: Local Customizations</name>
<description>
Overlay RDF customizations
</description>
<parent>
<artifactId>modules</artifactId>
<groupId>org.dspace</groupId>
<version>5.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<properties>
<!-- This is the path to the root [dspace-src] directory. -->
<root.basedir>${basedir}/../../..</root.basedir>
</properties>
<build>
<filters>
<!-- Filter using the properties file defined by dspace-parent POM -->
<filter>${filters.file}</filter>
</filters>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<configuration>
<archiveClasses>false</archiveClasses>
<webResources>
<resource>
<filtering>true</filtering>
<directory>${basedir}/src/main/webapp</directory>
<includes>
<include>WEB-INF/web.xml</include>
</includes>
</resource>
</webResources>
</configuration>
<executions>
<execution>
<phase>prepare-package</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>oracle-support</id>
<activation>
<property>
<name>db.name</name>
<value>oracle</value>
</property>
</activation>
<dependencies>
<dependency>
<groupId>com.oracle</groupId>
<artifactId>ojdbc6</artifactId>
</dependency>
</dependencies>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.dspace.modules</groupId>
<artifactId>additions</artifactId>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-rdf</artifactId>
<type>war</type>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>


@@ -698,6 +698,10 @@ Common usage:
<fileset dir="${dspace.dir}/webapps/jspui/" />
</war>
<war destfile="${dspace.dir}/webapps/rdf.war">
<fileset dir="${dspace.dir}/webapps/rdf/"/>
</war>
<war destfile="${dspace.dir}/webapps/rest.war">
<fileset dir="${dspace.dir}/webapps/rest/"/>
</war>
@@ -728,6 +732,8 @@ Common usage:
<mkdir dir="${dspace.dir}/webapps" />
<mkdir dir="${dspace.dir}/triplestore" />
<mkdir dir="${org.dspace.app.itemexport.work.dir}" />
<mkdir dir="${org.dspace.app.itemexport.download.dir}" />

pom.xml

@@ -33,6 +33,7 @@
<java.version>1.7</java.version>
<lucene.version>4.4.0</lucene.version>
<solr.version>4.4.0</solr.version>
<jena.version>2.12.0</jena.version>
<slf4j.version>1.6.1</slf4j.version>
<!-- 'root.basedir' is the path to the root [dspace-src] dir. It must be redefined by each child POM,
as it is used to reference the LICENSE_HEADER and *.properties file(s) in that directory. -->
@@ -202,11 +203,12 @@
</excludes>
<mapping>
<!-- Custom DSpace file extensions which are not recognized by maven-release-plugin:
*.xmap, *.xslt, *.wsdd, *.wsdl, *.LICENSE -->
*.xmap, *.xslt, *.wsdd, *.wsdl, *.ttl, *.LICENSE -->
<xmap>XML_STYLE</xmap>
<xslt>XML_STYLE</xslt>
<wsdd>XML_STYLE</wsdd>
<wsdl>XML_STYLE</wsdl>
<ttl>SCRIPT_STYLE</ttl>
<LICENSE>TEXT</LICENSE>
</mapping>
<encoding>UTF-8</encoding>
@@ -452,6 +454,21 @@
</modules>
</profile>
<!--
Builds RDF API and Data Provider WAR for DSpace
-->
<profile>
<id>dspace-rdf</id>
<activation>
<file>
<exists>dspace-rdf/pom.xml</exists>
</file>
</activation>
<modules>
<module>dspace-rdf</module>
</modules>
</profile>
<!-- REST Jersey -->
<profile>
<id>dspace-rest</id>
@@ -571,12 +588,13 @@
<!-- Activate all modules *except* for the 'dspace' module,
as it does not include any Java source code to release. -->
<modules>
<module>dspace-api</module>
<module>dspace-jspui</module>
<module>dspace-api</module>
<module>dspace-jspui</module>
<module>dspace-xmlui-mirage2</module>
<module>dspace-xmlui</module>
<module>dspace-lni</module>
<module>dspace-oai</module>
<module>dspace-rdf</module>
<module>dspace-rest</module>
<module>dspace-sword</module>
<module>dspace-swordv2</module>
@@ -694,6 +712,12 @@
<artifactId>dspace-services</artifactId>
<version>5.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-rdf</artifactId>
<version>5.0-SNAPSHOT</version>
<type>war</type>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-rest</artifactId>
@@ -809,6 +833,12 @@
<artifactId>lucene-queryparser</artifactId>
<version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
<version>${jena.version}</version>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>handle</artifactId>
@@ -1189,6 +1219,14 @@
</mailingLists>
<developers>
<developer>
<name>Pascal-Nicolas Becker</name>
<email>dspace at pascal-becker dot de</email>
<roles>
<role>committer</role>
</roles>
<timezone>+1</timezone>
</developer>
<developer>
<name>Andrea Bollini</name>
<email>bollini at users.sourceforge.net</email>