DS-2701: small cleanup for org.dspace.rdf.

This commit is contained in:
Pascal-Nicolas Becker
2015-08-28 18:55:59 +02:00
parent fbca4716a4
commit c3109e3be3
7 changed files with 62 additions and 80 deletions

View File

@@ -24,6 +24,8 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
import org.dspace.workflow.WorkflowItemService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
/**
*
@@ -40,6 +42,16 @@ public class RDFConsumer implements Consumer
protected BundleService bundleService;
protected SiteService siteService;
protected WorkspaceItemService workspaceItemService;
protected WorkflowItemService workflowItemService;
@Override
public void initialize() throws Exception {
bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
bundleService = ContentServiceFactory.getInstance().getBundleService();
siteService = ContentServiceFactory.getInstance().getSiteService();
workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
workflowItemService = WorkflowServiceFactory.getInstance().getWorkflowItemService();
}
@Override
public void consume(Context ctx, Event event)
@@ -47,12 +59,10 @@ public class RDFConsumer implements Consumer
{
if (this.toConvert == null)
{
log.debug("Initalized first queue.");
this.toConvert = new LinkedList<>();
}
if (this.toDelete == null)
{
log.debug("Initalized second queue.");
this.toDelete = new LinkedList<>();
}
@@ -119,9 +129,10 @@ public class RDFConsumer implements Consumer
List<Item> items = b.getItems();
for (Item i : items)
{
if (workspaceItemService.findByItem(ctx, i) != null)
if (workspaceItemService.findByItem(ctx, i) != null
|| workflowItemService.findByItem(ctx, i) != null)
{
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace or workflow item exists.");
continue;
}
DSOIdentifier id = new DSOIdentifier(i, ctx);
@@ -169,9 +180,10 @@ public class RDFConsumer implements Consumer
List<Item> items = bundle.getItems();
for (Item i : items)
{
if (workspaceItemService.findByItem(ctx, i) != null)
if (workspaceItemService.findByItem(ctx, i) != null
|| workflowItemService.findByItem(ctx, i) != null)
{
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace or workflow item exists.");
continue;
}
DSOIdentifier id = new DSOIdentifier(i, ctx);
@@ -244,12 +256,15 @@ public class RDFConsumer implements Consumer
// ignore unfinished submissions here. Every unfinished submission
// has a workspace item. The item flag "in_archive" doesn't help us
// here as this is also set to false if a newer version was submitted.
if (dso instanceof Item
&& workspaceItemService.findByItem(ctx, (Item) dso) != null)
if (dso instanceof Item)
{
log.debug("Ignoring Item " + dso.getID() + " as a corresponding workspace item exists.");
if (workspaceItemService.findByItem(ctx, (Item) dso) != null
|| workflowItemService.findByItem(ctx, (Item) dso) != null)
{
log.debug("Ignoring Item " + dso.getID() + " as a corresponding workspace or workflow item exists.");
return;
}
}
DSOIdentifier id = new DSOIdentifier(dso, ctx);
// If an item gets withdrawn, a MODIFY event is fired. We have to
@@ -280,16 +295,20 @@ public class RDFConsumer implements Consumer
}
public void consumeSite(Context ctx, Event event) throws SQLException {
// in case a top level community was added or removed.
// event type remove won't be thrown until DS-1966 is fixed (e.g. by
// merging PR #517).
if (event.getEventType() == Event.ADD
|| event.getEventType() == Event.REMOVE)
|| event.getEventType() == Event.REMOVE
|| event.getEventType() == Event.MODIFY
|| event.getEventType() == Event.MODIFY_METADATA)
{
Site site = siteService.findSite(ctx);
DSOIdentifier id = new DSOIdentifier(Constants.SITE,
site.getID(), site.getHandle(), Arrays.asList(site.getHandle()));
if (!this.toConvert.contains(id)) this.toConvert.add(id);
if (!this.toConvert.contains(id))
{
this.toConvert.add(id);
}
return;
}
log.warn("Got an unexpected Event for the SITE. Event type is "
@@ -435,14 +454,6 @@ public class RDFConsumer implements Consumer
public void finish(Context ctx) throws Exception {
}
@Override
public void initialize() throws Exception {
bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
bundleService = ContentServiceFactory.getInstance().getBundleService();
siteService = ContentServiceFactory.getInstance().getSiteService();
workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
}
class DSOIdentifier
{
@@ -466,8 +477,9 @@ public class RDFConsumer implements Consumer
&& dso.getType() != Constants.COLLECTION
&& dso.getType() != Constants.ITEM)
{
throw new IllegalArgumentException("Provided DSpaceObject does"
+ " not have a handle!");
throw new IllegalArgumentException(
ContentServiceFactory.getInstance().getDSpaceObjectService(dso).getTypeText(dso)
+ " is currently not supported as independent entity by dspace-rdf.");
}
this.type = dso.getType();
this.id = dso.getID();
@@ -479,23 +491,14 @@ public class RDFConsumer implements Consumer
public boolean equals(Object o)
{
if (!(o instanceof DSOIdentifier)) return false;
DSOIdentifier dsoId = (DSOIdentifier) o;
/*
log.warn("Testing if " + Constants.typeText[this.type] + " "
+ Integer.toString(this.id) + " and "
+ Constants.typeText[dsoId.type] + " "
+ Integer.toString(dsoId.id) + " are equal.");
*/
return (this.type == dsoId.type && this.id == dsoId.id);
// Cast o to DSOIdentifier and compare the UUIDs for equality.
return this.id.equals(((DSOIdentifier) o).id);
}
@Override
public int hashCode()
{
/* log.debug("Created hash " + Integer.toString(this.type + (10*this.id)));*/
// as at least up to DSpace version 4.1 DSpaceObjectType is a
// as at least up to DSpace version 5.3 DSpaceObjectType is a
// one-digit number, this should produce a distinct hash.
return this.type + (10*this.id.hashCode());
}
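For readers skimming the diff, the identity scheme that equals() and hashCode() now implement can be seen in isolation below. This is a minimal standalone sketch, not the actual inner class: it assumes a simplified DSOIdentifier that carries only the type constant and the UUID.

import java.util.UUID;

class DSOIdentifierSketch
{
    final int type;   // one of the org.dspace.core.Constants type codes (a one-digit value)
    final UUID id;    // UUID primary key used by the service-based API

    DSOIdentifierSketch(int type, UUID id)
    {
        this.type = type;
        this.id = id;
    }

    @Override
    public boolean equals(Object o)
    {
        if (!(o instanceof DSOIdentifierSketch)) return false;
        // as in the patched equals(..): two identifiers are equal iff their UUIDs are equal
        return this.id.equals(((DSOIdentifierSketch) o).id);
    }

    @Override
    public int hashCode()
    {
        // the type code stays below 10, so type + 10 * id.hashCode() keeps hashes distinct
        // even for objects of different types
        return this.type + (10 * this.id.hashCode());
    }
}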

View File

@@ -78,7 +78,7 @@ public class RDFUtil {
* @param context DSpace Context.
* @param type Type of the DSpaceObject you want to generate a URI for (e.g.
* {@link org.dspace.core.Constants#ITEM Constants.ITEM}).
* @param id ID of the DSpaceObject you want to generate a URI for.
* @param id UUID of the DSpaceObject you want to generate a URI for.
* @param handle Handle of the DSpaceObject you want to generate a URI for.
* @return URI to identify the DSO or null if no URI could be generated.
* This can happen e.g. if you use a URIGenerator that uses
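The javadoc above belongs to the static identifier-generation helper in RDFUtil. A hedged usage sketch follows; it assumes the documented method is the generateIdentifier(Context, int, UUID, String, List&lt;String&gt;) overload and that an ItemService instance and a known UUID are available in scope, so treat the names as illustrative only.

Context context = new Context(Context.READ_ONLY);
Item item = itemService.find(context, uuid);   // itemService and uuid assumed to exist in scope
String uri = RDFUtil.generateIdentifier(context,
        Constants.ITEM,                        // type of the DSpaceObject
        item.getID(),                          // UUID of the DSpaceObject
        item.getHandle(),
        Arrays.asList(item.getHandle()));
if (uri == null)
{
    // no URI could be generated, e.g. because the configured URIGenerator relies on
    // persistent identifiers the object does not have yet
}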

View File

@@ -14,6 +14,7 @@ import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArraySet;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -70,16 +71,16 @@ public class RDFizer {
* multiple DSpaceObjects (e.g. Communities with all Subcommunities and
* Items).
*/
protected Set<String> processed;
protected Set<UUID> processed;
public RDFizer() throws SQLException
public RDFizer()
{
this.configurationService = new DSpace().getConfigurationService();
this.stdout = false;
this.verbose = false;
this.dryrun = false;
this.lang = "TURTLE";
this.processed = new CopyOnWriteArraySet<String>();
this.processed = new CopyOnWriteArraySet<UUID>();
this.context = new Context(Context.READ_ONLY);
this.contentServiceFactory = ContentServiceFactory.getInstance();
this.communityService = contentServiceFactory.getCommunityService();
@@ -232,7 +233,7 @@ public class RDFizer {
throws SQLException
{
report("Starting conversion of all DSpaceItems, this may take a while...");
this.convert(new Site(), true);
this.convert(contentServiceFactory.getSiteService().findSite(context), true);
report("Conversion ended.");
}
@@ -338,7 +339,7 @@ public class RDFizer {
}
markProcessed(dso);
// this is useful to debug depth first search, but it is really noisy.
// log.debug("Procesing " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID() + handle + ".");
//log.debug("Procesing " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID() + ":" + dso.getHandle() + ".");
// if this method is used for conversion we should check if we have the
// permissions to read a DSO before converting all of its descendants
@@ -410,9 +411,10 @@ public class RDFizer {
}
}
// Currently Bundles and Bitsreams arn't supported as independent entities.
// Currently Bundles and Bitstreams aren't supported as independent entities.
// They should be converted as part of an item. So we do not need to make
// the recursive call for them.
// the recursive call for them. An item itself will be converted as part
// of the callback call below.
//
// if (dso instanceof Item)
// {
@@ -439,16 +441,12 @@ public class RDFizer {
protected boolean isProcessed(DSpaceObject dso)
{
String key = Integer.toString(dso.getType()) + "/"
+ dso.getID().toString();
return this.processed.contains(key);
return this.processed.contains(dso.getID());
}
protected void markProcessed(DSpaceObject dso)
{
String key = Integer.toString(dso.getType()) + "/"
+ dso.getID().toString();
this.processed.add(key);
this.processed.add(dso.getID());
}
protected void report(String message)
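The composite "type/uuid" String key is redundant once objects are tracked by UUID, because a UUID is already unique across all object types. A short contrast sketch, assuming a DSpaceObject dso in scope and the java.util / java.util.concurrent classes imported above:

// Old keying scheme (removed): a String composed of the type code and the UUID.
String oldKey = Integer.toString(dso.getType()) + "/" + dso.getID().toString();

// New keying scheme: the UUID itself. UUIDs cannot collide across object types,
// so the type prefix adds nothing and the Set can hold UUIDs directly.
Set<UUID> processed = new CopyOnWriteArraySet<>();
processed.add(dso.getID());
boolean seen = processed.contains(dso.getID());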
@@ -794,17 +792,7 @@ public class RDFizer {
Context context = new Context(Context.READ_ONLY);
RDFizer myself = null;
try {
myself = new RDFizer();
} catch (SQLException ex) {
System.err.println("A problem with the database occurred: "
+ ex.getMessage());
ex.printStackTrace(System.err);
log.error(ex);
context.abort();
System.exit(1);
}
myself.overrideContext(context);
myself.runCLI(args);

View File

@@ -134,8 +134,7 @@ public class MetadataConverterPlugin implements ConverterPlugin
return null;
}
Item item = (Item) dso;
List<MetadataValue> metadata_values = dsoService.getMetadata(item, MetadataSchema.DC_SCHEMA, Item.ANY, Item.ANY, Item.ANY);
List<MetadataValue> metadata_values = dsoService.getMetadata(dso, MetadataSchema.DC_SCHEMA, Item.ANY, Item.ANY, Item.ANY);
for (MetadataValue value : metadata_values)
{
MetadataField metadataField = value.getMetadataField();

View File

@@ -422,8 +422,7 @@ implements ConverterPlugin
}
// add all parents
List<Community> parents = collection.getCommunities();
for (DSpaceObject parent : parents)
for (DSpaceObject parent : collection.getCommunities())
{
if (!RDFUtil.isPublicBoolean(context, parent))
{
@@ -498,8 +497,7 @@ implements ConverterPlugin
}
// add all parents
List<Collection> collections = item.getCollections();
for (DSpaceObject parent : collections)
for (DSpaceObject parent : item.getCollections())
{
if (!RDFUtil.isPublicBoolean(context, parent))
{

View File

@@ -15,11 +15,8 @@ import java.util.Iterator;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.SiteService;
import org.dspace.core.ConfigurationManager;
import org.dspace.rdf.RDFConfiguration;
import org.dspace.utils.DSpace;
/**
*
@@ -35,8 +32,6 @@ public class Negotiator {
public static final int TURTLE = 3;
public static final int N3 = 4;
protected static final SiteService siteService = ContentServiceFactory.getInstance().getSiteService();
public static final String DEFAULT_LANG="html";
private static final Logger log = Logger.getLogger(Negotiator.class);
@@ -267,8 +262,7 @@ public class Negotiator {
// if html is requested we have to forward to the repository's webui.
if ("html".equals(lang))
{
urlBuilder.append((new DSpace()).getConfigurationService()
.getProperty("dspace.url"));
urlBuilder.append(ConfigurationManager.getProperty("dspace.url"));
if (!handle.equals(ConfigurationManager.getProperty("handle.prefix") + "/0"))
{
urlBuilder.append("/handle/");

View File

@@ -31,8 +31,8 @@ implements URIGenerator
private static final Logger log = Logger.getLogger(DOIURIGenerator.class);
/*
* Currently (August 31 2014, in preparation of DSpace 5.0) DSpace supports DOIs for items only. This fallback
* will be used to generate an URI, whenever no DOI was found that could be used to.
* Currently (DSpace 5) DSpace supports DOIs for items only. This fallback
* will be used to generate a URI whenever no DOI was found.
*/
protected final static URIGenerator fallback = new LocalURIGenerator();
protected final DOIService doiService = IdentifierServiceFactory.getInstance().getDOIService();
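The rewritten comment describes a fallback: only items carry DOIs, so every other object, and any item without a DOI, falls through to the LocalURIGenerator. A hedged sketch of that pattern follows; it assumes URIGenerator exposes a generateIdentifier(Context, int, UUID, String, List&lt;String&gt;) method, uses findDOI as a hypothetical stand-in for the real DOI lookup, and omits the conversion of the DOI to its external, resolvable form via doiService.

public String generateIdentifier(Context context, int type, UUID id, String handle,
        List<String> identifiers) throws SQLException
{
    if (type != Constants.ITEM)
    {
        // nothing but items can have DOIs, delegate to the LocalURIGenerator
        return fallback.generateIdentifier(context, type, id, handle, identifiers);
    }
    String doi = findDOI(identifiers);   // hypothetical helper: pick a DOI out of the identifier list
    if (doi == null)
    {
        // item without a DOI: fall back, as the comment above describes
        return fallback.generateIdentifier(context, type, id, handle, identifiers);
    }
    // the real generator turns the DOI into its external form via doiService before returning it
    return doi;
}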