DS-3190: Refactor org.dspace.rdf.* to use Spring.

dspace-rdf was developed as a standalone module. At the end of the
development most of it was moved into dspace-api. Instead of using
Spring, it used its own class that instantiated all necessary classes.
This PR solves the problem described in DS-3190 by refactoring
org.dspace.rdf to adopt the patterns used in DSpace 6. It introduces
[dspace]/config/spring/api/rdf.xml to configure Spring to instantiate
all necessary classes. org.dspace.rdf.RDFConfiguration was removed
completely; the configuration keys are centralised in
org.dspace.rdf.RDFUtil. Instead of org.dspace.rdf.RDFConfiguration and
DSpace's old ConfigurationManager, the new ConfigurationService is now
used everywhere. Configuration properties are no longer held in
variables, so the RDF module benefits from the new auto-reload feature
from DS-2654.
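
Concretely, this means reading configuration through the ConfigurationService
on every call instead of caching values in fields. A minimal sketch of the
pattern, assuming only ConfigurationService calls that appear in this diff
(field and method names are illustrative):

    // Before: the value is cached in a field at construction time, so the
    // auto-reload from DS-2654 never reaches it; edits to rdf.cfg need a restart.
    private final String sparqlEndpoint =
            configurationService.getProperty("rdf.public.sparql.endpoint");

    // After: the value is resolved on every call and picks up reloaded values.
    public String getPublicSparqlEndpoint()
    {
        return DSpaceServicesFactory.getInstance()
                .getConfigurationService()
                .getProperty("rdf.public.sparql.endpoint");
    }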
Pascal-Nicolas Becker
2016-04-28 21:55:31 +02:00
parent 2e309a116f
commit 90447ed0b1
18 changed files with 462 additions and 627 deletions

View File

@@ -1,297 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.core.Constants;
import org.dspace.rdf.conversion.RDFConverter;
import org.dspace.rdf.storage.RDFStorage;
import org.dspace.rdf.storage.URIGenerator;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
*
* @author Pascal-Nicolas Becker (dspace -at- pascal -hyphen- becker -dot- de)
*/
public class RDFConfiguration {
private static final Logger log = Logger.getLogger(RDFConfiguration.class);
/**
* Property key to load the public address of the SPARQL endpoint.
*/
public static final String SPARQL_ENDPOINT_KEY = "rdf.public.sparql.endpoint";
/**
* Property key to load the class to use as URIGenerator.
*/
public static final String URIGENERATOR_KEY = "rdf.URIGenerator";
/**
* Property key to load the class to use as RDFConverter.
*/
public static final String RDFCONVERTER_KEY = "rdf.converter";
/**
* Property key to load the list of plugins for the RDFConverter.
*/
public static final String CONVERTER_PLUGINS_KEY = "rdf.converter.plugins";
/**
* Key of the Property to load the types of DSpaceObjects that should get
* converted.
*/
public static final String CONVERTER_DSOTYPES_KEY = "rdf.converter.DSOtypes";
/**
* Property key to load the class to use as RDFStorage.
*/
public static final String RDFSTORAGE_KEY = "rdf.storage";
/**
* Property key to load the address of the SPARQL 1.1 GRAPH STORE HTTP
* PROTOCOL endpoint.
*/
public static final String STORAGE_GRAPHSTORE_ENDPOINT_KEY =
"rdf.storage.graphstore.endpoint";
/**
* Property key to load whether HTTP authentication for the
* graph store endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_AUTHENTICATION_KEY =
"rdf.storage.graphstore.authentication";
/**
* Property key to load the username if authentication for the graph store
* endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_LOGIN_KEY = "rdf.storage.graphstore.login";
/**
* Property key to load the password if authentication for the graph store
* endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_PASSWORD_KEY = "rdf.storage.graphstore.password";
/**
* Property key to load the address of the SPARQL endpoint to use within
* DSpace. If the property is empty or does not exist, the public SPARQL
* endpoint will be used.
*/
public static final String STORAGE_SPARQL_ENDPOINT_KEY = "rdf.storage.sparql.endpoint";
/**
* Property key to load whether HTTP authentication for the internal SPARQL
* endpoint is required.
*/
public static final String STORAGE_SPARQL_AUTHENTICATION_KEY = "rdf.storage.sparql.authentication";
/**
* Property key to load the username if authentication for the internal
* SPARQL endpoint is required.
*/
public static final String STORAGE_SPARQL_LOGIN_KEY = "rdf.storage.sparql.login";
/**
* Property key to load the password if authentication for the internal
* SPARQL endpoint is required.
*/
public static final String STORAGE_SPARQL_PASSWORD_KEY = "rdf.storage.sparql.password";
/**
* Property key to load the URL of the dspace-rdf module. This is necessary
* to create links from the jspui or xmlui to the RDF representation of
* DSpaceObjects.
*/
public static final String CONTEXT_PATH_KEY = "rdf.contextPath";
public static final String CONTENT_NEGOTIATION_KEY = "rdf.contentNegotiation.enable";
private static URIGenerator generator;
private static RDFStorage storage;
private static RDFConverter converter;
public static String[] getConverterPlugins()
{
return RDFConfiguration.loadConfigurationArray(CONVERTER_PLUGINS_KEY);
}
public static String[] getDSOTypesToConvert()
{
String[] dsoTypes = DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty(CONVERTER_DSOTYPES_KEY);
if (dsoTypes == null)
{
log.warn("Property rdf." + CONVERTER_DSOTYPES_KEY + " was not found "
+ "or is empty. Will convert all type of DSpace Objects.");
return Constants.typeText;
}
return dsoTypes;
}
public static boolean isConvertType(int type)
{
for (String typeName : getDSOTypesToConvert())
{
if (Constants.getTypeID(typeName) == type) return true;
}
return false;
}
public static boolean isConvertType(String type)
{
for (String typeName : getDSOTypesToConvert())
{
if (typeName.equalsIgnoreCase(type)) return true;
}
return false;
}
public static boolean isContentNegotiationEnabled()
{
ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
return configurationService.getPropertyAsType(CONTENT_NEGOTIATION_KEY,
false);
}
public static String getPublicSparqlEndpointAddress()
{
ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
return configurationService.getProperty(SPARQL_ENDPOINT_KEY);
}
public static String getInternalSparqlEndpointAddress()
{
ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
String internalSparqlEndpoint =
configurationService.getProperty(STORAGE_SPARQL_ENDPOINT_KEY);
String externalSparqlEndpoint =
configurationService.getProperty(SPARQL_ENDPOINT_KEY);
return StringUtils.isEmpty(internalSparqlEndpoint) ?
externalSparqlEndpoint : internalSparqlEndpoint;
}
public static String getDSpaceRDFModuleURI()
{
ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
return configurationService.getProperty(CONTEXT_PATH_KEY);
}
protected static RDFConverter getRDFConverter()
{
if (converter == null)
{
ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
converter = (RDFConverter) initializeClass(configurationService,
RDFCONVERTER_KEY, "RDFConverter");
}
return converter;
}
/*
* Initialize the URIGenerator configured in the DSpace configuration (see
* {@link #URIGENERATOR_KEY URIGENERATOR_KEY}).
* The URIGenerator should be configurable using the DSpace configuration
* rather than Spring, to avoid XML configuration. This method loads and
* initializes the configured URIGenerator. It is static so that the
* RDFizer does not have to be initialized to generate the identifier for a DSO.
*/
protected static URIGenerator getURIGenerator()
{
if (generator == null)
{
ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
generator = (URIGenerator) initializeClass(configurationService,
URIGENERATOR_KEY, "URIGenerator");
}
return generator;
}
/*
* Initialize the RDFStorage configured in the DSpace configuration (see
* {@link #RDFSTORAGE_KEY RDFSTORAGE_KEY}).
* The storage class should be configurable using the DSpace configuration
* rather than Spring, to avoid XML configuration. This method loads and
* initializes the configured RDFStorage class. It is static so that the
* RDFizer does not have to be initialized to load RDF data.
*/
protected static RDFStorage getRDFStorage()
{
if (storage == null)
{
ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
storage = (RDFStorage) initializeClass(configurationService,
RDFSTORAGE_KEY, "RDFStorage");
}
return storage;
}
/**
* Load a comma-separated value from the configuration and split it into a
* string array.
* @param key Key of the configuration value.
* @return Null if the configuration value was not found or empty, otherwise
* a String array representing the configuration value split on commas.
*/
public static String[] loadConfigurationArray(String key)
{
ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
if(config.hasProperty(key))
return config.getArrayProperty(key);
else
return null;
}
/*
* This method must be static, so we can use it from
* RDFizer.generateIdentifier and RDFizer.generateGraphName. Because this
* method is static, we cannot use the configurationService initialized in
* the class constructor.
* This method loads from the DSpace configuration which class to use and
* initializes it.
*/
private static Object initializeClass(ConfigurationService configurationService,
String propertyName,
String objectName)
{
String className = configurationService.getProperty(propertyName);
if (StringUtils.isEmpty(className))
{
log.error("Cannot load " + objectName + "! Property " + propertyName
+ " not found or empty!");
throw new RuntimeException("Cannot load " + objectName
+ ", property not found or not configured!");
}
Object instantiatedObject = null;
try
{
Class objectClass = Class.forName(className);
instantiatedObject = objectClass.newInstance();
} catch (ClassNotFoundException ex) {
log.error("Cannot find class '" + className + "' for " + objectName
+ ". " + "Please check your configuration.", ex);
throw new RuntimeException("Cannot find class for " + objectName
+ " (" + className + ").", ex);
} catch (InstantiationException ex) {
log.error("Cannot instantiate " + objectName + " (class "
+ className + ").", ex);
throw new RuntimeException("Cannot instantiate " + objectName
+ " (class " + className + ").", ex);
} catch (IllegalAccessException ex) {
log.error("IllegalAccessException thrown while instantiating the "
+ objectName + " (class " + className + ").", ex);
throw new RuntimeException("IllegalAccessException thrown while "
+ "instantiating the " + objectName + " (class "
+ className + ").", ex);
}
return instantiatedObject;
}
}

View File

@@ -24,6 +24,8 @@ import org.dspace.content.Site;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.factory.RDFFactory;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
*
@@ -33,6 +35,54 @@ public class RDFUtil {
private static final Logger log = Logger.getLogger(RDFUtil.class);
private static final AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
public static final String CONTENT_NEGOTIATION_KEY = "rdf.contentNegotiation.enable";
/**
* Key of the Property to load the types of DSpaceObjects that should get
* converted.
*/
public static final String CONVERTER_DSOTYPES_KEY = "rdf.converter.DSOtypes";
/**
* Property key to load the password if authentication for the graph store
* endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_PASSWORD_KEY = "rdf.storage.graphstore.password";
/**
* Property key to load the URL of the dspace-rdf module. This is necessary
* to create links from the jspui or xmlui to the RDF representation of
* DSpaceObjects.
*/
public static final String CONTEXT_PATH_KEY = "rdf.contextPath";
/**
* Property key to load the public address of the SPARQL endpoint.
*/
public static final String SPARQL_ENDPOINT_KEY = "rdf.public.sparql.endpoint";
/**
* Property key to load the username if authentication for the internal
* SPARQL endpoint is required.
*/
public static final String STORAGE_SPARQL_LOGIN_KEY = "rdf.storage.sparql.login";
/**
* Property key to load the password if authentication for the internal
* SPARQL endpoint is required.
*/
public static final String STORAGE_SPARQL_PASSWORD_KEY = "rdf.storage.sparql.password";
/**
* Property key to load the address of the SPARQL 1.1 GRAPH STORE HTTP
* PROTOCOL endpoint.
*/
public static final String STORAGE_GRAPHSTORE_ENDPOINT_KEY = "rdf.storage.graphstore.endpoint";
/**
* Property key to load the address of the SPARQL endpoint to use within
* DSpace. If the property is empty or does not exist, the public SPARQL
* endpoint will be used.
*/
public static final String STORAGE_SPARQL_ENDPOINT_KEY = "rdf.storage.sparql.endpoint";
/**
* Property key to load the username if authentication for the graph store
* endpoint is required.
*/
public static final String STORAGE_GRAPHSTORE_LOGIN_KEY = "rdf.storage.graphstore.login";
/**
* Loads converted data of a DSpaceObject identified by the URI provided
* as {@code identifier}. This method uses the RDFStorage configured in
@@ -45,7 +95,7 @@ public class RDFUtil {
*/
public static Model loadModel(String identifier)
{
return RDFConfiguration.getRDFStorage().load(identifier);
return RDFFactory.getInstance().getRDFStorage().load(identifier);
}
/**
@@ -65,7 +115,7 @@ public class RDFUtil {
public static String generateIdentifier(Context context, DSpaceObject dso)
throws SQLException
{
return RDFConfiguration.getURIGenerator().generateIdentifier(context, dso);
return RDFFactory.getInstance().getURIGenerator().generateIdentifier(context, dso);
}
/**
@@ -89,7 +139,7 @@ public class RDFUtil {
String handle, List<String> identifier)
throws SQLException
{
return RDFConfiguration.getURIGenerator().generateIdentifier(context,
return RDFFactory.getInstance().getURIGenerator().generateIdentifier(context,
type, id, handle, identifier);
}
/**
@@ -124,22 +174,33 @@ public class RDFUtil {
ItemWithdrawnException, ItemNotDiscoverableException,
AuthorizeException, IllegalArgumentException
{
if (dso.getType() != Constants.SITE
&& dso.getType() != Constants.COMMUNITY
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
String[] dsoTypes = DSpaceServicesFactory.getInstance()
.getConfigurationService()
.getArrayProperty(CONVERTER_DSOTYPES_KEY);
if (dsoTypes == null || dsoTypes.length == 0)
{
throw new IllegalArgumentException(ContentServiceFactory.getInstance().getDSpaceObjectService(dso).getTypeText(dso)
+ " is currently not supported as independent entity.");
log.warn("Property rdf." + CONVERTER_DSOTYPES_KEY + " was not found "
+ "or is empty. Will convert all type of DSpace Objects.");
} else {
boolean found = false;
for (String type : dsoTypes)
{
if (StringUtils.equalsIgnoreCase(Constants.typeText[dso.getType()], type.trim()))
{
found = true;
break;
}
if (!RDFConfiguration.isConvertType(ContentServiceFactory.getInstance().getDSpaceObjectService(dso).getTypeText(dso)))
}
if (!found)
{
log.warn("Configuration of DSpaceObjects of type "
+ Constants.typeText[dso.getType()]
+ " prohibitted by configuration.");
return null;
}
}
isPublic(context, dso);
return RDFConfiguration.getRDFConverter().convert(context, dso);
return RDFFactory.getInstance().getRDFConverter().convert(context, dso);
}
/**
@@ -190,11 +251,11 @@ public class RDFUtil {
{
// if data about this dso is stored in the triplestore already, we
// should remove it, as a conversion currently results in no data
RDFConfiguration.getRDFStorage().delete(identifier);
RDFFactory.getInstance().getRDFStorage().delete(identifier);
return null;
}
RDFConfiguration.getRDFStorage().store(identifier, convertedData);
RDFFactory.getInstance().getRDFStorage().store(identifier, convertedData);
return convertedData;
}
@@ -274,7 +335,7 @@ public class RDFUtil {
*/
public static void delete(String uri)
{
RDFConfiguration.getRDFStorage().delete(uri);
RDFFactory.getInstance().getRDFStorage().delete(uri);
}
/**
@@ -290,11 +351,11 @@ public class RDFUtil {
public static void delete(Context ctx, int type, UUID id, String handle, List<String> identifiers)
throws SQLException, RDFMissingIdentifierException
{
String uri = RDFConfiguration.getURIGenerator().generateIdentifier(ctx,
type, id, handle, identifiers);
String uri = RDFFactory.getInstance().getURIGenerator()
.generateIdentifier(ctx, type, id, handle, identifiers);
if (uri != null)
{
RDFConfiguration.getRDFStorage().delete(uri);
RDFFactory.getInstance().getRDFStorage().delete(uri);
} else {
throw new RDFMissingIdentifierException(type, id);
}

View File

@@ -39,6 +39,8 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.rdf.factory.RDFFactory;
import org.dspace.rdf.storage.RDFStorage;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -53,17 +55,19 @@ public class RDFizer {
private static final Logger log = Logger.getLogger(RDFizer.class);
protected ConfigurationService configurationService;
protected boolean stdout;
protected boolean verbose;
protected boolean dryrun;
protected String lang;
protected Context context;
protected final ConfigurationService configurationService;
protected final ContentServiceFactory contentServiceFactory;
protected final CommunityService communityService;
protected final ItemService itemService;
protected final HandleService handleService;
protected final RDFStorage storage;
/**
* Set to remember which DSpaceObjects were converted or deleted from the
@@ -75,17 +79,19 @@ public class RDFizer {
public RDFizer()
{
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
this.stdout = false;
this.verbose = false;
this.dryrun = false;
this.lang = "TURTLE";
this.processed = new CopyOnWriteArraySet<UUID>();
this.context = new Context(Context.READ_ONLY);
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
this.contentServiceFactory = ContentServiceFactory.getInstance();
this.communityService = contentServiceFactory.getCommunityService();
this.itemService = contentServiceFactory.getItemService();
this.handleService = HandleServiceFactory.getInstance().getHandleService();
this.storage = RDFFactory.getInstance().getRDFStorage();
}
/**
@@ -170,7 +176,7 @@ public class RDFizer {
public void deleteAll()
{
report("Sending delete command to the triple store.");
if (!this.dryrun) RDFConfiguration.getRDFStorage().deleteAll();
if (!this.dryrun) storage.deleteAll();
report("Deleted all data from the triplestore.");
}
@@ -218,7 +224,7 @@ public class RDFizer {
report("Deleting Named Graph" + identifier);
if (!dryrun)
{
RDFConfiguration.getRDFStorage().delete(identifier);
storage.delete(identifier);
}
}
};
@@ -412,9 +418,11 @@ public class RDFizer {
}
// Currently Bundles and Bitstreams aren't supported as independent entities.
// The should be converted as part of an item. So we do not need to make
// They should be converted as part of an item. So we do not need to make
// the recursive call for them. An item itself will be converted as part
// of the callback call below.
// The following code is left here for the day we decide to also convert
// bundles and/or bitstreams.
//
// if (dso instanceof Item)
// {
@@ -563,7 +571,7 @@ public class RDFizer {
{
if (!this.dryrun)
{
RDFConfiguration.getRDFStorage().delete(identifier);
storage.delete(identifier);
}
if (this.verbose)
{

View File

@@ -35,6 +35,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
@@ -47,6 +48,7 @@ public class MetadataConverterPlugin implements ConverterPlugin
public final static String METADATA_PREFIXES_KEY = "rdf.metadata.prefixes";
private final static Logger log = Logger.getLogger(MetadataConverterPlugin.class);
@Autowired(required=true)
protected ConfigurationService configurationService;
@Override

View File

@@ -12,14 +12,12 @@ import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.sql.SQLException;
import java.util.List;
import java.util.ArrayList;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
@@ -27,56 +25,33 @@ import org.dspace.services.factory.DSpaceServicesFactory;
*/
public class RDFConverterImpl implements RDFConverter
{
protected ConfigurationService configurationService;
protected List<ConverterPlugin> plugins;
private static final Logger log = Logger.getLogger(RDFConverterImpl.class);
protected RDFConverterImpl()
{
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
this.plugins = new ArrayList<ConverterPlugin>();
protected ConfigurationService configurationService;
protected List<ConverterPlugin> plugins;
String pluginNames[] = RDFConfiguration.getConverterPlugins();
if (pluginNames == null || pluginNames.length == 0)
@Autowired(required=true)
public void setConfigurationService(ConfigurationService configurationService)
{
log.error("Cannot load RDF converter plugins!");
throw new RuntimeException("Cannot load rdf converter plugins!");
this.configurationService = configurationService;
}
for (String plugin : pluginNames)
@Autowired(required=true)
public void setPlugins(List<ConverterPlugin> plugins)
{
try
this.plugins = plugins;
if (log.isDebugEnabled())
{
Class pluginClass = Class.forName(plugin);
ConverterPlugin pluginInstance =
(ConverterPlugin) pluginClass.newInstance();
pluginInstance.setConfigurationService(this.configurationService);
this.plugins.add(pluginInstance);
StringBuilder pluginNames = new StringBuilder();
for (ConverterPlugin plugin : plugins)
{
if (pluginNames.length() > 0)
{
pluginNames.append(", ");
}
catch (ClassNotFoundException ex)
{
log.warn("Cannot load plugin '" + plugin
+ "': class not found!", ex);
// If we ignored a plugin, we would generate incomplete RDF data.
throw new RuntimeException(ex.getMessage(), ex);
pluginNames.append(plugin.getClass().getCanonicalName());
}
catch (IllegalAccessException ex)
{
log.warn("Cannot load plugin '" + plugin
+ "': illegal access!", ex);
// If we ignored a plugin, we would generate incomplete RDF data.
throw new RuntimeException(ex.getMessage(), ex);
}
catch (InstantiationException ex)
{
log.warn("Cannot load plugin '" + plugin
+ "': cannot instantiate the module!", ex);
// If we ignored a plugin, we would generate incomplete RDF data.
throw new RuntimeException(ex.getMessage(), ex);
}
log.debug("Successfully loaded RDFConverterPlugin "
+ plugin + ".");
log.debug("Loaded the following plugins: " + pluginNames.toString());
}
}

View File

@@ -22,16 +22,15 @@ import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.Util;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.SiteService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
@@ -53,91 +52,18 @@ implements ConverterPlugin
private static final Logger log = Logger.getLogger(SimpleDSORelationsConverterPlugin.class);
@Autowired(required=true)
protected BitstreamService bitstreamService;
@Autowired(required=true)
protected ItemService itemService;
@Autowired(required=true)
protected CommunityService communityService;
@Autowired(required=true)
protected SiteService siteService;
@Autowired(required=true)
protected ConfigurationService configurationService;
protected String[] site2community;
protected String[] community2site;
protected String[] community2subcommunity;
protected String[] subcommunity2community;
protected String[] community2collection;
protected String[] collection2community;
protected String[] collection2item;
protected String[] item2collection;
protected String[] item2bitstream;
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected SiteService siteService = ContentServiceFactory.getInstance().getSiteService();
public SimpleDSORelationsConverterPlugin()
{
site2community = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_SITE2COMMUNITY_KEY);
community2site = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COMMUNITY2SITE_KEY);
community2subcommunity = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COMMUNITY2SUBCOMMUNITY_KEY);
subcommunity2community = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_SUBCOMMUNITY2COMMUNITY_KEY);
community2collection = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COMMUNITY2COLLECTION_KEY);
collection2community = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COLLECTION2COMMUNITY_KEY);
collection2item = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_COLLECTION2ITEM_KEY);
item2collection = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_ITEM2COLLECTION_KEY);
item2bitstream = RDFConfiguration.loadConfigurationArray(SIMPLE_RELATIONS_ITEM2BITSTREAM_KEY);
if (site2community == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between the repository "
+ "the repository (SITE) and the top communities.");
}
if (community2site == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "the top communities and the repository (SITE).");
}
if (community2subcommunity == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "communities and subcommunities.");
}
if (subcommunity2community == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "subcommunities and communities.");
}
if (community2collection == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "communities and collections.");
}
if (collection2community == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "collections and communities.");
}
if (collection2item == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "collections and items");
}
if (item2collection == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "items and collections");
}
if (item2bitstream == null)
{
log.warn("SimpleDSORelationsConverterPlugin was unable to load "
+ "configuration to convert relation between "
+ "items and bitstreams.");
}
}
/**
* Loads the prefixes that should be used by the
* SimpleDSORelationsConverterPlugin. Please remember to close the model
@@ -210,7 +136,8 @@ implements ConverterPlugin
public Model convertSite(Context context, Site site)
throws SQLException
{
if (site2community == null)
String[] site2community = configurationService.getArrayProperty(SIMPLE_RELATIONS_SITE2COMMUNITY_KEY);
if (site2community == null || site2community.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from the repository (SITE) to the top level "
@@ -262,33 +189,52 @@ implements ConverterPlugin
public Model convertCommunity(Context context, Community community)
throws SQLException
{
if (community2site == null)
String[] community2site = configurationService.getArrayProperty(SIMPLE_RELATIONS_COMMUNITY2SITE_KEY);
if (community2site == null || community2site.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from the top level communities to the repository "
+ "(SITE) is disabled. Won't link from the top level "
+ "communities to the repository (SITE).");
// don't return here, as we might have to add other links.
// ensure community2site is not null
community2site = new String[] {};
}
if (community2subcommunity == null)
String[] community2subcommunity = configurationService.getArrayProperty(SIMPLE_RELATIONS_COMMUNITY2SUBCOMMUNITY_KEY);
if (community2subcommunity == null || community2subcommunity.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from communities to subcommunities was disabled. "
+ "Won't link from communities to subcommunities.");
// don't return here, as we might have to add other links.
// ensure community2subcommunity is not null
community2subcommunity = new String[] {};
}
if (subcommunity2community == null)
String[] subcommunity2community = configurationService.getArrayProperty(SIMPLE_RELATIONS_SUBCOMMUNITY2COMMUNITY_KEY);
if (subcommunity2community == null || subcommunity2community.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from subcommunities to communities was disabled. "
+ "Won't link from subcommunities to communities.");
// don't return here, as we might have to add other links.
// ensure subcommunity2community is not null
subcommunity2community = new String[] {};
}
if (community2collection == null)
String[] community2collection = configurationService.getArrayProperty(SIMPLE_RELATIONS_COMMUNITY2COLLECTION_KEY);
if (community2collection == null || community2collection.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from communities to collections was disabled. "
+ "Won't link from collections to subcommunities.");
// don't return here, as we might have to add other links.
// ensure community2collection is not null
community2collection = new String[] {};
}
if (community2site == null && community2subcommunity == null
&& subcommunity2community == null && community2collection == null)
if (community2site.length == 0 && community2subcommunity.length == 0
&& subcommunity2community.length == 0 && community2collection.length == 0)
{
return null;
}
@@ -393,19 +339,28 @@ implements ConverterPlugin
public Model convertCollection(Context context, Collection collection)
throws SQLException
{
if (collection2community == null)
String[] collection2community = configurationService.getArrayProperty(SIMPLE_RELATIONS_COLLECTION2COMMUNITY_KEY);
if (collection2community == null || collection2community.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from collections to communities was disabled. "
+ "Won't link from collections to communities.");
// don't return here, as we might have to link to items.
// ensure collection2community is not null
collection2community = new String[] {};
}
if (collection2item == null)
String[] collection2item = configurationService.getArrayProperty(SIMPLE_RELATIONS_COLLECTION2ITEM_KEY);
if (collection2item == null || collection2item.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from collections to items was disabled. "
+ "Won't link from collections to items.");
// don't return here, as we might have to link to communities.
// ensure collection2item is not null
collection2item = new String[] {};
}
if (collection2community == null && collection2item == null)
if (collection2community.length == 0 && collection2item.length == 0)
{
return null;
}
@@ -468,19 +423,29 @@ implements ConverterPlugin
public Model convertItem(Context context, Item item)
throws SQLException
{
if (item2collection == null)
String[] item2collection = configurationService.getArrayProperty(SIMPLE_RELATIONS_ITEM2COLLECTION_KEY);
if (item2collection == null || item2collection.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from items to collections was disabled. "
+ "Won't link from items to collections.");
// don't return here, as we might have to link to bitstreams.
// ensure item2collection is not null
item2collection = new String[] {};
}
if (item2bitstream == null)
String[] item2bitstream = configurationService.getArrayProperty(SIMPLE_RELATIONS_ITEM2BITSTREAM_KEY);
if (item2bitstream == null || item2bitstream.length == 0)
{
log.info("Either there was a problem loading the configuration or "
+ "linking from items to bitstreams was disabled. "
+ "Won't link from items to bitstreams.");
// don't return here, as we might have to link to collections.
// ensure item2bitstream is not null
item2bitstream = new String[] {};
}
if (item2collection == null && item2bitstream == null)
if (item2collection.length == 0 && item2bitstream.length == 0)
{
return null;
}

View File

@@ -22,6 +22,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
@@ -35,12 +36,14 @@ implements ConverterPlugin
public static final String CONSTANT_DATA_FILENAME_KEY_PREFIX = "rdf.constant.data.";
public static final String CONSTANT_DATA_GENERAL_KEY_SUFFIX = "GENERAL";
@Autowired(required=true)
protected ConfigurationService configurationService;
@Override
public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
@Override
public Model convert(Context context, DSpaceObject dso)
throws SQLException

View File

@@ -0,0 +1,32 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.factory;
import org.dspace.rdf.conversion.RDFConverter;
import org.dspace.rdf.storage.RDFStorage;
import org.dspace.rdf.storage.URIGenerator;
import org.dspace.utils.DSpace;
/**
* Abstract factory to get services for the rdf package; use RDFFactory.getInstance() to retrieve an implementation.
* @author Pascal-Nicolas Becker (p dot becker at tu hyphen berlin dot de)
*/
public abstract class RDFFactory
{
public abstract RDFStorage getRDFStorage();
public abstract URIGenerator getURIGenerator();
public abstract RDFConverter getRDFConverter();
public static RDFFactory getInstance()
{
return new DSpace().getServiceManager().getServiceByName("rdfFactory", RDFFactory.class);
}
}
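
Callers obtain the Spring-wired services through this factory instead of
instantiating classes reflectively. A minimal usage sketch, built only from
calls that appear elsewhere in this commit:

    // Look up the implementations configured in [dspace]/config/spring/api/rdf.xml.
    RDFStorage storage = RDFFactory.getInstance().getRDFStorage();
    URIGenerator generator = RDFFactory.getInstance().getURIGenerator();
    RDFConverter converter = RDFFactory.getInstance().getRDFConverter();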

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.rdf.factory;
import org.apache.log4j.Logger;
import org.dspace.rdf.conversion.RDFConverter;
import org.dspace.rdf.storage.RDFStorage;
import org.dspace.rdf.storage.URIGenerator;
import org.springframework.beans.factory.annotation.Required;
/**
*
* @author Pascal-Nicolas Becker (p dot becker at tu hyphen berlin dot de)
*/
public class RDFFactoryImpl extends RDFFactory
{
// We have several URIGenerators that use each other as fallbacks.
// Consequently we have to instantiate all of them and cannot use autowiring
// by type here. So we use setters and properties in the Spring configuration
// instead.
private static final Logger log = Logger.getLogger(RDFFactoryImpl.class);
private RDFStorage storage;
private URIGenerator generator;
private RDFConverter converter;
@Required
public void setStorage(RDFStorage storage) {
this.storage = storage;
}
@Required
public void setGenerator(URIGenerator generator) {
if (log.isDebugEnabled())
{
log.debug("Using '" + generator.getClass().getCanonicalName()
+ "' as URIGenerator.");
}
this.generator = generator;
}
@Required
public void setConverter(RDFConverter converter) {
this.converter = converter;
}
@Override
public RDFStorage getRDFStorage() {
return storage;
}
@Override
public URIGenerator getURIGenerator() {
return generator;
}
@Override
public RDFConverter getRDFConverter() {
return converter;
}
}

View File

@@ -21,7 +21,8 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
*
@@ -45,7 +46,8 @@ public class NegotiationFilter implements Filter
{
try
{
if (!RDFConfiguration.isContentNegotiationEnabled())
if (!DSpaceServicesFactory.getInstance().getConfigurationService()
.getBooleanProperty(RDFUtil.CONTENT_NEGOTIATION_KEY, false))
{
chain.doFilter(request, response);
return;

View File

@@ -15,8 +15,8 @@ import java.util.Iterator;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
*
@@ -244,7 +244,8 @@ public class Negotiator {
if (StringUtils.isEmpty(handle))
{
log.warn("Handle is empty, set it to Site Handle.");
handle = ConfigurationManager.getProperty("handle.prefix") + "/0";
handle = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("handle.prefix") + "/0";
}
// don't redirect if HTML is requested and content negotiation is done
@@ -262,8 +263,10 @@ public class Negotiator {
// if html is requested we have to forward to the repositories webui.
if ("html".equals(lang))
{
urlBuilder.append(ConfigurationManager.getProperty("dspace.url"));
if (!handle.equals(ConfigurationManager.getProperty("handle.prefix") + "/0"))
urlBuilder.append(DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("dspace.url"));
if (!handle.equals(DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("handle.prefix") + "/0"))
{
urlBuilder.append("/handle/");
urlBuilder.append(handle).append("/").append(extraPathInfo);
@@ -287,7 +290,9 @@ public class Negotiator {
}
// load the URI of the dspace-rdf module.
urlBuilder.append(RDFConfiguration.getDSpaceRDFModuleURI());
urlBuilder.append(DSpaceServicesFactory.getInstance()
.getConfigurationService()
.getProperty(RDFUtil.CONTEXT_PATH_KEY));
if (urlBuilder.length() == 0)
{
log.error("Cannot load URL of dspace-rdf module. "

View File

@@ -8,6 +8,10 @@
package org.dspace.rdf.storage;
import org.dspace.identifier.service.DOIService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
/**
* Extends the DOIURIGenerator but uses handles as fallback to DOIs.
* @author pbecker
@@ -16,7 +20,16 @@ public class DOIHandleURIGenerator
extends DOIURIGenerator
implements URIGenerator
{
protected static URIGenerator fallback;
protected final static URIGenerator fallback = new HandleURIGenerator();
@Required
public static void setFallback(URIGenerator fallback) {
DOIURIGenerator.fallback = fallback;
}
@Autowired(required=true)
public void setDoiService(DOIService doiService) {
this.doiService = doiService;
}
}

View File

@@ -20,6 +20,8 @@ import org.dspace.identifier.service.DOIService;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
/**
*
@@ -30,12 +32,15 @@ implements URIGenerator
{
private static final Logger log = Logger.getLogger(DOIURIGenerator.class);
/*
* Currently (DSpace 5) DSpace supports DOIs for items only. This fallback
* will be used to generate a URI whenever no DOI is found.
*/
protected final static URIGenerator fallback = new LocalURIGenerator();
protected final DOIService doiService = IdentifierServiceFactory.getInstance().getDOIService();
protected static URIGenerator fallback;
@Required
public static void setFallback(URIGenerator fallback) {
DOIURIGenerator.fallback = fallback;
}
@Autowired(required=true)
protected DOIService doiService;
@Override
public String generateIdentifier(Context context, int type, UUID id, String handle, List<String> identifiers) throws SQLException {

View File

@@ -23,6 +23,7 @@ import org.dspace.handle.service.HandleService;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
@@ -31,8 +32,10 @@ import java.util.UUID;
public class HandleURIGenerator implements URIGenerator {
private static final Logger log = Logger.getLogger(HandleURIGenerator.class);
protected final SiteService siteService = ContentServiceFactory.getInstance().getSiteService();
protected final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
@Autowired(required=true)
protected SiteService siteService;
@Autowired(required=true)
protected HandleService handleService;
@Override

View File

@@ -15,12 +15,13 @@ import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Site;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.SiteService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
@@ -29,14 +30,16 @@ import org.dspace.rdf.RDFConfiguration;
public class LocalURIGenerator implements URIGenerator {
private static final Logger log = Logger.getLogger(LocalURIGenerator.class);
protected final SiteService siteService = ContentServiceFactory.getInstance().getSiteService();
@Autowired(required=true)
protected SiteService siteService;
@Override
public String generateIdentifier(Context context, int type, UUID id,
String handle, List<String> identifiers)
throws SQLException
{
String urlPrefix = RDFConfiguration.getDSpaceRDFModuleURI() + "/resource/";
String urlPrefix = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty(RDFUtil.CONTEXT_PATH_KEY) + "/resource/";
if (type == Constants.SITE)
{

View File

@@ -30,9 +30,9 @@ import org.apache.jena.atlas.web.auth.SimpleAuthenticator;
import org.apache.jena.web.DatasetGraphAccessor;
import org.apache.jena.web.DatasetGraphAccessorHTTP;
import org.apache.log4j.Logger;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
@@ -43,85 +43,8 @@ implements RDFStorage
{
private static final Logger log = Logger.getLogger(RDFStorageImpl.class);
private final String GRAPHSTORE_ENDPOINT;
private final String GRAPHSTORE_LOGIN;
private final String GRAPHSTORE_PASSWORD;
private final String SPARQL_ENDPOINT;
private final String SPARQL_LOGIN;
private final String SPARQL_PASSWORD;
private ConfigurationService configurationService;
public RDFStorageImpl()
{
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
this.GRAPHSTORE_ENDPOINT = this.configurationService
.getProperty(RDFConfiguration.STORAGE_GRAPHSTORE_ENDPOINT_KEY);
if (StringUtils.isEmpty(this.GRAPHSTORE_ENDPOINT))
{
log.warn("Cannot load Graph Store HTTP Protocol endpoint! Property "
+ RDFConfiguration.STORAGE_GRAPHSTORE_ENDPOINT_KEY + " does not "
+ "exist or is empty.");
throw new RuntimeException("Cannot load Graph Store HTTP Protocol "
+ "endpoint! Property "
+ RDFConfiguration.STORAGE_GRAPHSTORE_ENDPOINT_KEY + " does not "
+ "exist or is empty.");
}
boolean graphstore_use_auth = this.configurationService.getPropertyAsType(
RDFConfiguration.STORAGE_GRAPHSTORE_AUTHENTICATION_KEY, false);
String graphstore_login = this.configurationService.getProperty(
RDFConfiguration.STORAGE_GRAPHSTORE_LOGIN_KEY);
String graphstore_password = this.configurationService.getProperty(
RDFConfiguration.STORAGE_GRAPHSTORE_PASSWORD_KEY);
if (!graphstore_use_auth
|| (graphstore_use_auth && StringUtils.isEmpty(graphstore_login))
|| (graphstore_use_auth && StringUtils.isEmpty(graphstore_password)))
{
this.GRAPHSTORE_LOGIN = null;
this.GRAPHSTORE_PASSWORD = null;
if (graphstore_use_auth)
{
log.warn("The rdf storage is configured to use authentication "
+ "to connect to the Graph Store HTTP Protocol endpoint, "
+ "but no credentials are configured.");
}
} else {
this.GRAPHSTORE_LOGIN = graphstore_login;
this.GRAPHSTORE_PASSWORD = graphstore_password;
}
this.SPARQL_ENDPOINT = RDFConfiguration.getInternalSparqlEndpointAddress();
if (StringUtils.isEmpty(this.SPARQL_ENDPOINT))
{
log.warn("Cannot load internal or public SPARQL endpoint!");
throw new RuntimeException("Cannot load internal or public SPARQL "
+ "endpoint!");
}
boolean sparql_use_auth = this.configurationService.getPropertyAsType(
RDFConfiguration.STORAGE_SPARQL_AUTHENTICATION_KEY, false);
String sparql_login = this.configurationService.getProperty(
RDFConfiguration.STORAGE_SPARQL_LOGIN_KEY);
String sparql_password = this.configurationService.getProperty(
RDFConfiguration.STORAGE_SPARQL_PASSWORD_KEY);
if (!sparql_use_auth
|| (sparql_use_auth && StringUtils.isEmpty(sparql_login))
|| (sparql_use_auth && StringUtils.isEmpty(sparql_password)))
{
this.SPARQL_LOGIN = null;
this.SPARQL_PASSWORD = null;
if (sparql_use_auth)
{
log.warn("The rdf storage is configured to use authentication "
+ "for sparql quries, but no credentials are configured.");
}
} else {
this.SPARQL_LOGIN = sparql_login;
this.SPARQL_PASSWORD = sparql_password;
}
}
@Autowired(required=true)
protected ConfigurationService configurationService;
@Override
public void store(String uri, Model model)
@@ -153,14 +76,18 @@ implements RDFStorage
protected DatasetGraphAccessor getAccessor()
{
DatasetGraphAccessor accessor;
if (this.GRAPHSTORE_LOGIN != null)
if (configurationService.hasProperty(RDFUtil.STORAGE_GRAPHSTORE_LOGIN_KEY)
&& configurationService.hasProperty(RDFUtil.STORAGE_GRAPHSTORE_PASSWORD_KEY))
{
HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
GRAPHSTORE_LOGIN, GRAPHSTORE_PASSWORD.toCharArray());
accessor = new DatasetGraphAccessorHTTP(GRAPHSTORE_ENDPOINT,
configurationService.getProperty(RDFUtil.STORAGE_GRAPHSTORE_LOGIN_KEY),
configurationService.getProperty(RDFUtil.STORAGE_GRAPHSTORE_PASSWORD_KEY).toCharArray());
accessor = new DatasetGraphAccessorHTTP(getGraphStoreEndpoint(),
httpAuthenticator);
} else {
accessor = new DatasetGraphAccessorHTTP(GRAPHSTORE_ENDPOINT);
log.debug("Did not found credential to use for our connection to the "
+ "Graph Store HTTP endpoint, trying to connect unauthenticated.");
accessor = new DatasetGraphAccessorHTTP(getGraphStoreEndpoint());
}
return accessor;
}
@@ -184,14 +111,16 @@ implements RDFStorage
public List<String> getAllStoredGraphs() {
String queryString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }";
QueryExecution qexec;
if (this.SPARQL_LOGIN != null)
if (configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY)
&& configurationService.hasProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY))
{
HttpAuthenticator httpAuthenticator = new SimpleAuthenticator(
SPARQL_LOGIN, SPARQL_PASSWORD.toCharArray());
qexec = QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT,
configurationService.getProperty(RDFUtil.STORAGE_SPARQL_LOGIN_KEY),
configurationService.getProperty(RDFUtil.STORAGE_SPARQL_PASSWORD_KEY).toCharArray());
qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(),
queryString, httpAuthenticator);
} else {
qexec = QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT,
qexec = QueryExecutionFactory.sparqlService(getSparqlEndpoint(),
queryString);
}
@@ -207,22 +136,40 @@ implements RDFStorage
}
qexec.close();
return graphs;
/*
} catch (QueryExceptionHTTP ex)
{
System.err.println("== QUERYEXCEPTIONHTTP ==");
System.err.println(ex.getMessage());
System.err.println(ex.getResponseCode() + ": " + ex.getResponseMessage());
Throwable cause = ex.getCause();
int i = 1;
while (cause != null)
{
System.err.println("Cause " + i + " '" + cause.getClass().getName() + "': " + cause.getMessage());
cause = cause.getCause();
i++;
}
ex.printStackTrace(System.err);
throw new RuntimeException(ex);
}*/
protected String getGraphStoreEndpoint()
{
String endpoint = configurationService.getProperty(RDFUtil.STORAGE_GRAPHSTORE_ENDPOINT_KEY);
if (StringUtils.isEmpty(endpoint))
{
log.warn("Cannot load Graph Store HTTP Protocol endpoint! Property "
+ RDFUtil.STORAGE_GRAPHSTORE_ENDPOINT_KEY + " does not "
+ "exist or is empty.");
throw new RuntimeException("Cannot load Graph Store HTTP Protocol "
+ "endpoint! Property "
+ RDFUtil.STORAGE_GRAPHSTORE_ENDPOINT_KEY + " does not "
+ "exist or is empty.");
}
return endpoint;
}
protected String getSparqlEndpoint()
{
// Let's see if a SPARQL endpoint is defined to be used by RDFStorageImpl
String endpoint = configurationService.getProperty(RDFUtil.STORAGE_SPARQL_ENDPOINT_KEY);
if (StringUtils.isEmpty(endpoint))
{
// try to load the public sparql endpoint
endpoint = configurationService.getProperty(RDFUtil.SPARQL_ENDPOINT_KEY);
}
// check if we found an endpoint
if (StringUtils.isEmpty(endpoint))
{
log.warn("Cannot load internal or public SPARQL endpoint!");
throw new RuntimeException("Cannot load internal or public SPARQL "
+ "endpoint!");
}
return endpoint;
}
}

View File

@@ -1,34 +1,20 @@
#---------------------------------------------------------------#
#---------------------RDF CONFIGURATIONS------------------------#
#---------------------------------------------------------------#
# These configs are used by the RDF interface #
#---------------------------------------------------------------#
# These configs are used by dspace-rdf and the built-in Linked Data export (rdfizer)
# Configure if content negotiation should be enabled
rdf.contentNegotiation.enable = false
# Set the URL of the dspace-rdf module here. This is necessary to use content
# negotiation
rdf.contextPath = ${dspace.baseUrl}/rdf
#############################
### GENERAL CONFIGURATION ###
#############################
# Address of the public SPARQL endpoint
# DSpace will link to this address and use it if rdf.storage.sparql.endpoint isn't set.
rdf.public.sparql.endpoint = http://localhost/fuseki/dspace/sparql
# Defines the URIGenerator to be used.
# This defines what the identifiers used in the converted RDF data will look
# like.
rdf.URIGenerator = org.dspace.rdf.storage.LocalURIGenerator
# This property sets the class that manages the whole conversion process.
rdf.converter = org.dspace.rdf.conversion.RDFConverterImpl
# The following list contains all plugins to use for the conversion.
# All listed plugins will be used; disabling or enabling a plugin will strongly
# influence the result of the conversion.
rdf.converter.plugins = org.dspace.rdf.conversion.StaticDSOConverterPlugin, \
org.dspace.rdf.conversion.MetadataConverterPlugin, \
org.dspace.rdf.conversion.SimpleDSORelationsConverterPlugin
rdf.converter.DSOtypes = SITE, COMMUNITY, COLLECTION, ITEM
# Configure which class to use to store the converted data.
# Please see the configuration section below regarding the storage class
# you configured here.
rdf.storage = org.dspace.rdf.storage.RDFStorageImpl
# Address of the endpoint for the SPARQL 1.1 Graph Store HTTP Protocol
# This address is used to store data in the triple store.
rdf.storage.graphstore.endpoint = http://localhost:3030/dspace/data
@@ -51,11 +37,10 @@ rdf.storage.sparql.authentication = no
#rdf.storage.sparql.login = dspace
#rdf.storage.sparql.password = ecapsd
# Set the url of the dspace-rdf module here. This is necessary to use content
# negotiation in dspace-jspui
rdf.contextPath = ${dspace.baseUrl}/rdf
# Configure if content negotiation should be enabled
rdf.contentNegotiation.enable = false
# Which types of DSpaceObjects should be converted? Currently SITE, COMMUNITY,
# COLLECTION and ITEM are supported.
rdf.converter.DSOtypes = SITE, COMMUNITY, COLLECTION, ITEM
#############################
### PLUGINS CONFIGURATION ###

View File

@@ -0,0 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (c) 2002-2016, DuraSpace. All rights reserved
Licensed under the DuraSpace License.
A copy of the DuraSpace License has been included in this
distribution and is available at: http://www.dspace.org/license
-->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
<bean id="rdfFactory" class="org.dspace.rdf.factory.RDFFactoryImpl">
<!-- This defines which URIGenerator will be used. The URIGenerator
itself must be instantiated below.
-->
<property name="generator" ref="org.dspace.rdf.storage.LocalURIGenerator"/>
<property name="storage" ref="org.dspace.rdf.storage.RDFStorage"/>
<property name="converter" ref="org.dspace.rdf.conversion.RDFConverter"/>
</bean>
<!-- configure all plugins the converter should use. If you don't want to
use a plugin, remove it here. -->
<bean id="org.dspace.rdf.conversion.SimpleDSORelationsConverterPlugin" class="org.dspace.rdf.conversion.SimpleDSORelationsConverterPlugin"/>
<bean id="org.dspace.rdf.conversion.MetadataConverterPlugin" class="org.dspace.rdf.conversion.MetadataConverterPlugin"/>
<bean id="org.dspace.rdf.conversion.StaticDSOConverterPlugin" class="org.dspace.rdf.conversion.StaticDSOConverterPlugin"/>
<!-- You do not need to change anything below this line, if you are not
developing DSpace. -->
<!-- Currently there is only one implementation of RDFConverter. This uses
automatically all instantiated plugins. -->
<bean id="org.dspace.rdf.conversion.RDFConverter" class="org.dspace.rdf.conversion.RDFConverterImpl" scope="singleton"/>
<!-- We have multiple instances of URIGenerator using different Persistent
Identifier. Some of them use others as fallback (e.g. generate a DOI,
if there is no DOI fallback to handle, if there is no handle fallback
to local URIs. -->
<bean id="org.dspace.rdf.storage.LocalURIGenerator" class="org.dspace.rdf.storage.LocalURIGenerator"/>
<bean id="org.dspace.rdf.storage.HandleURIGenerator" class="org.dspace.rdf.storage.HandleURIGenerator"/>
<bean id="org.dspace.rdf.storage.DOIURIGenerator" class="org.dspace.rdf.storage.DOIURIGenerator">
<property name="fallback" ref="org.dspace.rdf.storage.LocalURIGenerator"/>
</bean>
<bean id="org.dspace.rdf.storage.DOIHandleURIGenerator" class="org.dspace.rdf.storage.DOIHandleURIGenerator">
<property name="fallback" ref="org.dspace.rdf.storage.HandleURIGenerator"/>
</bean>
<!-- Currently there is only one implementation of RDFStorage -->
<bean id="org.dspace.rdf.storage.RDFStorage" class="org.dspace.rdf.storage.RDFStorageImpl" scope="singleton"/>
</beans>