Merge branch 'main' into 8320

Mark H. Wood
2022-08-02 09:48:58 -04:00
10 changed files with 416 additions and 215 deletions

View File

@@ -42,6 +42,7 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang3.StringUtils;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Collection;
 import org.dspace.content.Community;
@@ -55,6 +56,8 @@ import org.dspace.content.service.CommunityService;
 import org.dspace.core.Context;
 import org.dspace.eperson.factory.EPersonServiceFactory;
 import org.dspace.eperson.service.EPersonService;
+import org.dspace.handle.factory.HandleServiceFactory;
+import org.dspace.handle.service.HandleService;
 import org.jdom2.Element;
 import org.jdom2.output.Format;
 import org.jdom2.output.XMLOutputter;
@@ -79,6 +82,7 @@ import org.xml.sax.SAXException;
 * </community>
 * </import_structure>
 * }</pre>
+*
 * <p>
 * It can be arbitrarily deep, and supports all the metadata elements
 * that make up the community and collection metadata. See the system
@@ -107,12 +111,14 @@ public class StructBuilder {
 */
 private static final Map<String, MetadataFieldName> communityMap = new HashMap<>();
-protected static CommunityService communityService
+protected static final CommunityService communityService
 = ContentServiceFactory.getInstance().getCommunityService();
-protected static CollectionService collectionService
+protected static final CollectionService collectionService
 = ContentServiceFactory.getInstance().getCollectionService();
-protected static EPersonService ePersonService
+protected static final EPersonService ePersonService
 = EPersonServiceFactory.getInstance().getEPersonService();
+protected static final HandleService handleService
+= HandleServiceFactory.getInstance().getHandleService();
 /**
 * Default constructor
@@ -149,6 +155,7 @@ public class StructBuilder {
 options.addOption("h", "help", false, "Print this help message.");
 options.addOption("?", "help");
 options.addOption("x", "export", false, "Export the current structure as XML.");
+options.addOption("k", "keep-handles", false, "Apply Handles from input document.");
 options.addOption(Option.builder("e").longOpt("eperson")
 .desc("User who is manipulating the repository's structure.")
@@ -225,7 +232,8 @@ public class StructBuilder {
 inputStream = new FileInputStream(input);
 }
-importStructure(context, inputStream, outputStream);
+boolean keepHandles = options.hasOption("k");
+importStructure(context, inputStream, outputStream, keepHandles);
 inputStream.close();
 outputStream.close();
@@ -242,14 +250,17 @@ public class StructBuilder {
 * @param context
 * @param input XML which describes the new communities and collections.
 * @param output input, annotated with the new objects' identifiers.
+* @param keepHandles true if Handles should be set from input.
 * @throws IOException
 * @throws ParserConfigurationException
 * @throws SAXException
 * @throws TransformerException
 * @throws SQLException
 */
-static void importStructure(Context context, InputStream input, OutputStream output)
-throws IOException, ParserConfigurationException, SQLException, TransformerException, XPathExpressionException {
+static void importStructure(Context context, InputStream input,
+OutputStream output, boolean keepHandles)
+throws IOException, ParserConfigurationException, SQLException,
+TransformerException, XPathExpressionException {
 // load the XML
 Document document = null;
@@ -277,7 +288,19 @@ public class StructBuilder {
 NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]")
 .evaluate(document, XPathConstants.NODESET);
 if (identifierNodes.getLength() > 0) {
-System.err.println("The input document has 'identifier' attributes, which will be ignored.");
+if (!keepHandles) {
+System.err.println("The input document has 'identifier' attributes, which will be ignored.");
+} else {
+for (int i = 0; i < identifierNodes.getLength() ; i++) {
+String identifier = identifierNodes.item(i).getAttributes().item(0).getTextContent();
+if (handleService.resolveToURL(context, identifier) != null) {
+System.err.printf("The input document contains handle %s,"
++ " which is in use already. Aborting...%n",
+identifier);
+System.exit(1);
+}
+}
+}
 }
 // load the mappings into the member variable hashmaps
@@ -302,7 +325,7 @@ public class StructBuilder {
 .evaluate(document, XPathConstants.NODESET);
 // run the import starting with the top level communities
-elements = handleCommunities(context, first, null);
+elements = handleCommunities(context, first, null, keepHandles);
 } catch (TransformerException ex) {
 System.err.format("Input content not understood: %s%n", ex.getMessage());
 System.exit(1);
@@ -625,23 +648,29 @@ public class StructBuilder {
 * @param context the context of the request
 * @param communities a nodelist of communities to create along with their sub-structures
 * @param parent the parent community of the nodelist of communities to create
+* @param keepHandles use Handles from input.
 * @return an element array containing additional information regarding the
 * created communities (e.g. the handles they have been assigned)
 */
-private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
-throws TransformerException, SQLException, AuthorizeException, XPathExpressionException {
+private static Element[] handleCommunities(Context context, NodeList communities,
+Community parent, boolean keepHandles)
+throws TransformerException, SQLException, AuthorizeException,
+XPathExpressionException {
 Element[] elements = new Element[communities.getLength()];
 XPath xPath = XPathFactory.newInstance().newXPath();
 for (int i = 0; i < communities.getLength(); i++) {
-Community community;
-Element element = new Element("community");
+Node tn = communities.item(i);
+Node identifier = tn.getAttributes().getNamedItem("identifier");
 // create the community or sub community
-if (parent != null) {
+Community community;
+if (null == identifier
+|| StringUtils.isBlank(identifier.getNodeValue())
+|| !keepHandles) {
 community = communityService.create(parent, context);
 } else {
-community = communityService.create(null, context);
+community = communityService.create(parent, context, identifier.getNodeValue());
 }
 // default the short description to be an empty string
@@ -649,7 +678,6 @@ public class StructBuilder {
 MD_SHORT_DESCRIPTION, null, " ");
 // now update the metadata
-Node tn = communities.item(i);
 for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
 NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
 if (nl.getLength() == 1) {
@@ -675,6 +703,7 @@ public class StructBuilder {
 // but it's here to keep it separate from the create process in
 // case
 // we want to move it or make it switchable later
+Element element = new Element("community");
 element.setAttribute("identifier", community.getHandle());
 Element nameElement = new Element("name");
@@ -717,12 +746,16 @@ public class StructBuilder {
 }
 // handle sub communities
-NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(tn, XPathConstants.NODESET);
-Element[] subCommunityElements = handleCommunities(context, subCommunities, community);
+NodeList subCommunities = (NodeList) xPath.compile("community")
+.evaluate(tn, XPathConstants.NODESET);
+Element[] subCommunityElements = handleCommunities(context,
+subCommunities, community, keepHandles);
 // handle collections
-NodeList collections = (NodeList) xPath.compile("collection").evaluate(tn, XPathConstants.NODESET);
-Element[] collectionElements = handleCollections(context, collections, community);
+NodeList collections = (NodeList) xPath.compile("collection")
+.evaluate(tn, XPathConstants.NODESET);
+Element[] collectionElements = handleCollections(context,
+collections, community, keepHandles);
 int j;
 for (j = 0; j < subCommunityElements.length; j++) {
@@ -747,21 +780,31 @@ public class StructBuilder {
 * @return an Element array containing additional information about the
 * created collections (e.g. the handle)
 */
-private static Element[] handleCollections(Context context, NodeList collections, Community parent)
+private static Element[] handleCollections(Context context,
+NodeList collections, Community parent, boolean keepHandles)
 throws SQLException, AuthorizeException, XPathExpressionException {
 Element[] elements = new Element[collections.getLength()];
 XPath xPath = XPathFactory.newInstance().newXPath();
 for (int i = 0; i < collections.getLength(); i++) {
-Element element = new Element("collection");
-Collection collection = collectionService.create(context, parent);
+Node tn = collections.item(i);
+Node identifier = tn.getAttributes().getNamedItem("identifier");
+// Create the Collection.
+Collection collection;
+if (null == identifier
+|| StringUtils.isBlank(identifier.getNodeValue())
+|| !keepHandles) {
+collection = collectionService.create(context, parent);
+} else {
+collection = collectionService.create(context, parent, identifier.getNodeValue());
+}
 // default the short description to the empty string
 collectionService.setMetadataSingleValue(context, collection,
 MD_SHORT_DESCRIPTION, Item.ANY, " ");
 // import the rest of the metadata
-Node tn = collections.item(i);
 for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
 NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
 if (nl.getLength() == 1) {
@@ -772,6 +815,7 @@ public class StructBuilder {
 collectionService.update(context, collection);
+Element element = new Element("collection");
 element.setAttribute("identifier", collection.getHandle());
 Element nameElement = new Element("name");
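Taken together, the new -k / --keep-handles option and the extra boolean on importStructure() let a caller apply the identifier attributes in the input document as the created objects' Handles. A minimal sketch of driving the new signature programmatically follows; the file names and Context handling are illustrative assumptions, and because importStructure() is package-private, such a caller has to live in org.dspace.administer (as the integration test further down does):

    package org.dspace.administer;   // required: importStructure() is package-private

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.InputStream;
    import java.io.OutputStream;
    import org.dspace.core.Context;

    public class KeepHandlesImportSketch {
        public static void main(String[] args) throws Exception {
            Context context = new Context();
            context.turnOffAuthorisationSystem();
            try (InputStream in = new FileInputStream("structure.xml");          // illustrative path
                 OutputStream out = new FileOutputStream("structure-out.xml")) { // illustrative path
                // 'true' corresponds to -k / --keep-handles: identifier attributes
                // in the input become the Handles of the created objects.
                StructBuilder.importStructure(context, in, out, true);
            } finally {
                context.restoreAuthSystemState();
                context.complete();
            }
        }
    }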

View File

@@ -64,17 +64,21 @@ import org.springframework.beans.factory.annotation.Autowired;
 * Item exporter to create simple AIPs for DSpace content. Currently exports
 * individual items, or entire collections. For instructions on use, see
 * printUsage() method.
-* <P>
+* <p>
 * ItemExport creates the simple AIP package that the importer also uses. It
 * consists of:
-* <P>
-* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
-* core in RDF schema / contents - text file, listing one file per line / file1
-* - files contained in the item / file2 / ...
-* <P>
+* <pre>{@code
+* /exportdir/42/ (one directory per item)
+* / dublin_core.xml - qualified dublin core in RDF schema
+* / contents - text file, listing one file per line
+* / file1 - files contained in the item
+* / file2
+* / ...
+* }</pre>
+* <p>
 * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
 * {@code &amp;}, etc.)
-* <P>
+* <p>
 * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
 * of files (bitstreams) into DSpace.
 *
@@ -101,7 +105,7 @@ public class ItemExportServiceImpl implements ItemExportService {
 /**
 * log4j logger
 */
-private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
+private final Logger log = org.apache.logging.log4j.LogManager.getLogger();
 protected ItemExportServiceImpl() {
@@ -168,6 +172,7 @@ public class ItemExportServiceImpl implements ItemExportService {
 // make it this far, now start exporting
 writeMetadata(c, myItem, itemDir, migrate);
 writeBitstreams(c, myItem, itemDir, excludeBitstreams);
+writeCollections(myItem, itemDir);
 if (!migrate) {
 writeHandle(c, myItem, itemDir);
 }
@@ -343,6 +348,33 @@ public class ItemExportServiceImpl implements ItemExportService {
 }
 }
+/**
+* Create the 'collections' file. List handles of all Collections which
+* contain this Item. The "owning" Collection is listed first.
+*
+* @param item list collections holding this Item.
+* @param destDir write the file here.
+* @throws IOException if the file cannot be created or written.
+*/
+protected void writeCollections(Item item, File destDir)
+throws IOException {
+File outFile = new File(destDir, "collections");
+if (outFile.createNewFile()) {
+try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
+String ownerHandle = item.getOwningCollection().getHandle();
+out.println(ownerHandle);
+for (Collection collection : item.getCollections()) {
+String collectionHandle = collection.getHandle();
+if (!collectionHandle.equals(ownerHandle)) {
+out.println(collectionHandle);
+}
+}
+}
+} else {
+throw new IOException("Cannot create 'collections' in " + destDir);
+}
+}
 /**
 * Create both the bitstreams and the contents file. Any bitstreams that
 * were originally registered will be marked in the contents file as such.
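For reference, the 'collections' file that writeCollections() produces is simply one Handle per line, with the owning Collection's Handle on the first line and every other containing Collection after it. A hypothetical export might contain (Handles invented purely for illustration):

    123456789/42
    123456789/7
    123456789/15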

View File

@@ -12,7 +12,6 @@ import java.util.Map;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.dspace.content.Item;
-import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.logic.LogicalStatementException;
 import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.ItemService;
@@ -32,10 +31,10 @@ public abstract class AbstractCondition implements Condition {
 private Map<String, Object> parameters;
 // Declare and instantiate spring services
-//@Autowired(required = true)
-protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
-//@Autowired(required = true)
-protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
+@Autowired(required = true)
+protected ItemService itemService;
+@Autowired(required = true)
+protected CollectionService collectionService;
 @Autowired(required = true)
 protected HandleService handleService;
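With the factory lookups removed, itemService and collectionService are only populated when the Condition is created by the Spring container; code that constructs a Condition directly must now inject the service itself. The updated LogicalFilterTest further down does exactly that, and the pattern (using the same setter the test calls) is simply:

    // Outside the Spring container the @Autowired fields stay null, so set them by hand:
    Condition condition = new MetadataValuesMatchCondition();
    condition.setItemService(ContentServiceFactory.getInstance().getItemService());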

View File

@@ -69,14 +69,21 @@ public class OpenUrlServiceImpl implements OpenUrlService {
 */
 protected int getResponseCodeFromUrl(final String urlStr) throws IOException {
 HttpGet httpGet = new HttpGet(urlStr);
-RequestConfig requestConfig = getRequestConfigBuilder().setConnectTimeout(10 * 1000).build();
-HttpClient httpClient = HttpClientBuilder.create().setDefaultRequestConfig(requestConfig).build();
+HttpClient httpClient = getHttpClient(getHttpClientRequestConfig());
 HttpResponse httpResponse = httpClient.execute(httpGet);
 return httpResponse.getStatusLine().getStatusCode();
 }
-protected RequestConfig.Builder getRequestConfigBuilder() {
-return RequestConfig.custom();
+protected HttpClient getHttpClient(RequestConfig requestConfig) {
+return HttpClientBuilder.create()
+.setDefaultRequestConfig(requestConfig)
+.build();
+}
+protected RequestConfig getHttpClientRequestConfig() {
+return RequestConfig.custom()
+.setConnectTimeout(10 * 1000)
+.build();
 }
 /**
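Splitting client construction into getHttpClient(...) and getHttpClientRequestConfig() mainly creates a test seam: the request logic in getResponseCodeFromUrl() stays put while the client itself can be swapped out. A sketch of how a test double might use that seam follows; the class and field names are invented for illustration, and the reworked OpenUrlServiceImplTest below achieves the same effect with a Mockito spy instead of a subclass:

    import org.apache.http.client.HttpClient;
    import org.apache.http.client.config.RequestConfig;
    import org.dspace.statistics.export.service.OpenUrlServiceImpl;

    public class CannedClientOpenUrlService extends OpenUrlServiceImpl {
        private final HttpClient cannedClient;

        public CannedClientOpenUrlService(HttpClient cannedClient) {
            this.cannedClient = cannedClient;
        }

        @Override
        protected HttpClient getHttpClient(RequestConfig requestConfig) {
            return cannedClient;   // ignore the config and return the injected client
        }
    }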

View File

@@ -8,6 +8,7 @@
 package org.dspace.administer;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -18,9 +19,10 @@ import java.sql.SQLException;
 import java.util.Iterator;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.Source;
-import javax.xml.transform.TransformerException;
 import javax.xml.transform.stream.StreamSource;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.dspace.AbstractIntegrationTest;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Collection;
@@ -29,13 +31,11 @@ import org.dspace.content.MetadataSchemaEnum;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.CommunityService;
-import org.junit.After;
+import org.dspace.handle.Handle;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.w3c.dom.Attr;
 import org.w3c.dom.Node;
 import org.xml.sax.SAXException;
@@ -53,7 +53,7 @@ import org.xmlunit.diff.Difference;
 */
 public class StructBuilderIT
 extends AbstractIntegrationTest {
-private static final Logger log = LoggerFactory.getLogger(StructBuilderIT.class);
+private static final Logger log = LogManager.getLogger();
 private static final CommunityService communityService
 = ContentServiceFactory.getInstance().getCommunityService();
@@ -89,27 +89,28 @@ public class StructBuilderIT
 context.restoreAuthSystemState();
 }
-@After
-public void tearDown() {
-}
+private static final String COMMUNITY_0_HANDLE = "https://hdl.handle.net/1/1";
+private static final String COMMUNITY_0_0_HANDLE = "https://hdl.handle.net/1/1.1";
+private static final String COLLECTION_0_0_0_HANDLE = "https://hdl.handle.net/1/1.1.1";
+private static final String COLLECTION_0_1_HANDLE = "https://hdl.handle.net/1/1.2";
 /** Test structure document. */
 private static final String IMPORT_DOCUMENT =
 "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
 "<import_structure>\n" +
-" <community>\n" +
+" <community identifier='" + COMMUNITY_0_HANDLE + "'>\n" +
 " <name>Top Community 0</name>\n" +
 " <description>A top level community</description>\n" +
 " <intro>Testing 1 2 3</intro>\n" +
 " <copyright>1969</copyright>\n" +
 " <sidebar>A sidebar</sidebar>\n" +
-" <community>\n" +
+" <community identifier='" + COMMUNITY_0_0_HANDLE + "'>\n" +
 " <name>Sub Community 0.0</name>\n" +
 " <description>A sub community</description>\n" +
 " <intro>Live from New York....</intro>\n" +
 " <copyright>1957</copyright>\n" +
 " <sidebar>Another sidebar</sidebar>\n" +
-" <collection>\n" +
+" <collection identifier='" + COLLECTION_0_0_0_HANDLE + "'>\n" +
 " <name>Collection 0.0.0</name>\n" +
 " <description>A collection</description>\n" +
 " <intro>Our next guest needs no introduction</intro>\n" +
@@ -119,7 +120,14 @@ public class StructBuilderIT
 " <provenance>Testing</provenance>\n" +
 " </collection>\n" +
 " </community>\n" +
-" <collection>\n" +
+" <community>\n" +
+" <name>Sub Community 0.1</name>\n" +
+" <description>A sub community with no handle</description>\n" +
+" <intro>Stop me if you've heard this one</intro>\n" +
+" <copyright>2525</copyright>\n" +
+" <sidebar>One more sidebar</sidebar>\n" +
+" </community>\n" +
+" <collection identifier='" + COLLECTION_0_1_HANDLE + "'>\n" +
 " <name>Collection 0.1</name>\n" +
 " <description>Another collection</description>\n" +
 " <intro>Fourscore and seven years ago</intro>\n" +
@@ -150,7 +158,7 @@ public class StructBuilderIT
 * @throws java.lang.Exception passed through.
 */
 @Test
-public void testImportStructure()
+public void testImportStructureWithoutHandles()
 throws Exception {
 System.out.println("importStructure");
@@ -160,11 +168,7 @@ public class StructBuilderIT
 byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8);
 context.turnOffAuthorisationSystem();
 try (InputStream input = new ByteArrayInputStream(inputBytes);) {
-StructBuilder.importStructure(context, input, outputDocument);
-} catch (IOException | SQLException
-| ParserConfigurationException | TransformerException ex) {
-System.err.println(ex.getMessage());
-System.exit(1);
+StructBuilder.importStructure(context, input, outputDocument, false);
 } finally {
 context.restoreAuthSystemState();
 }
@@ -180,7 +184,81 @@ public class StructBuilderIT
 IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
 Diff myDiff = DiffBuilder.compare(reference).withTest(output)
 .normalizeWhitespace()
-// .withNodeFilter(new MyNodeFilter())
+.withAttributeFilter((Attr attr) ->
+!attr.getName().equals("identifier"))
+.checkForIdentical()
+.build();
+// Was there a difference?
+// Always output differences -- one is expected.
+ComparisonFormatter formatter = new DefaultComparisonFormatter();
+for (Difference difference : myDiff.getDifferences()) {
+System.err.println(difference.toString(formatter));
+}
+// Test for *significant* differences.
+assertFalse("Output does not match input.", isDifferent(myDiff));
+// TODO spot-check some objects.
+}
+/**
+* Test of importStructure method, with given Handles.
+*
+* @throws java.lang.Exception passed through.
+*/
+@Test
+public void testImportStructureWithHandles()
+throws Exception {
+System.out.println("importStructure");
+// Run the method under test and collect its output.
+ByteArrayOutputStream outputDocument
+= new ByteArrayOutputStream(IMPORT_DOCUMENT.length() * 2 * 2);
+byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8);
+context.turnOffAuthorisationSystem();
+try (InputStream input = new ByteArrayInputStream(inputBytes);) {
+StructBuilder.importStructure(context, input, outputDocument, true);
+} finally {
+context.restoreAuthSystemState();
+}
+boolean found;
+// Check a chosen Community for the right Handle.
+found = false;
+for (Community community : communityService.findAllTop(context)) {
+for (Handle handle : community.getHandles()) {
+if (handle.getHandle().equals(COMMUNITY_0_HANDLE)) {
+found = true;
+break;
+}
+}
+}
+assertTrue("A community should have its specified handle", found);
+// Check a chosen Collection for the right Handle.
+found = false;
+for (Collection collection : collectionService.findAll(context)) {
+for (Handle handle : collection.getHandles()) {
+if (handle.getHandle().equals(COLLECTION_0_1_HANDLE)) {
+found = true;
+break;
+}
+}
+}
+assertTrue("A collection should have its specified handle", found);
+// Compare import's output with its input.
+// N.B. here we rely on StructBuilder to emit communities and
+// collections in the same order as the input document. If that changes,
+// we will need a smarter NodeMatcher, probably based on <name> children.
+Source output = new StreamSource(
+new ByteArrayInputStream(outputDocument.toByteArray()));
+Source reference = new StreamSource(
+new ByteArrayInputStream(
+IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
+Diff myDiff = DiffBuilder.compare(reference).withTest(output)
+.normalizeWhitespace()
 .withAttributeFilter((Attr attr) ->
 !attr.getName().equals("identifier"))
 .checkForIdentical()
@@ -236,7 +314,6 @@ public class StructBuilderIT
 EXPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
 Diff myDiff = DiffBuilder.compare(reference).withTest(output)
 .normalizeWhitespace()
-// .withNodeFilter(new MyNodeFilter())
 .withAttributeFilter((Attr attr) ->
 !attr.getName().equals("identifier"))
 .checkForIdentical()
@@ -310,23 +387,4 @@ public class StructBuilderIT
 // There must be at most one difference.
 return diffIterator.hasNext();
 }
-/**
-* Reject uninteresting nodes. (currently commented out of tests above)
-*/
-/*private static class MyNodeFilter implements Predicate<Node> {
-private static final List<String> dontCare = Arrays.asList(
-"description",
-"intro",
-"copyright",
-"sidebar",
-"license",
-"provenance");
-@Override
-public boolean test(Node node) {
-String type = node.getLocalName();
-return ! dontCare.contains(type);
-}
-}*/
 }

View File

@@ -408,6 +408,7 @@ public class LogicalFilterTest extends AbstractUnitTest {
 // Create condition to match pattern on dc.title metadata
 Condition condition = new MetadataValuesMatchCondition();
+condition.setItemService(ContentServiceFactory.getInstance().getItemService());
 Map<String, Object> parameters = new HashMap<>();
 // Match on the dc.title field
 parameters.put("field", "dc.title");
@@ -461,6 +462,7 @@ public class LogicalFilterTest extends AbstractUnitTest {
 // Instantiate new filter for testing this condition
 DefaultFilter filter = new DefaultFilter();
 Condition condition = new InCollectionCondition();
+condition.setItemService(ContentServiceFactory.getInstance().getItemService());
 Map<String, Object> parameters = new HashMap<>();
 // Add collectionOne handle to the collections parameter - ie. we are testing to see if the item is

View File

@@ -9,9 +9,10 @@ package org.dspace.statistics.export.service;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.closeTo;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.doCallRealMethod;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
@@ -20,20 +21,23 @@ import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 import java.io.IOException;
+import java.math.BigDecimal;
 import java.net.HttpURLConnection;
 import java.sql.SQLException;
-import java.util.ArrayList;
+import java.util.Date;
 import java.util.List;
-import org.apache.http.client.config.RequestConfig;
+import org.apache.http.HttpResponse;
+import org.apache.http.StatusLine;
+import org.apache.http.client.HttpClient;
 import org.dspace.core.Context;
 import org.dspace.statistics.export.OpenURLTracker;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
+import org.mockito.ArgumentCaptor;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.Spy;
 import org.mockito.junit.MockitoJUnitRunner;
 /**
@@ -42,24 +46,64 @@ import org.mockito.junit.MockitoJUnitRunner;
 @RunWith(MockitoJUnitRunner.class)
 public class OpenUrlServiceImplTest {
-@InjectMocks
-@Spy
+/**
+* NOTE: Initialized as a Mockito spy in {@link #setUp()}.
+*/
 private OpenUrlServiceImpl openUrlService;
 @Mock
 private FailedOpenURLTrackerService failedOpenURLTrackerService;
+@Mock
+private HttpClient httpClient;
+@Before
+public void setUp() throws Exception {
+// spy on the class under test
+openUrlService = Mockito.spy(OpenUrlServiceImpl.class);
+// manually hook up dependencies (@autowire doesn't work when creating instances using Mockito)
+openUrlService.failedOpenUrlTrackerService = failedOpenURLTrackerService;
+// IMPORTANT: mock http client to prevent making REAL http requests
+doReturn(httpClient).when(openUrlService).getHttpClient(any());
+}
+/**
+* Create a mock http response with the given status code.
+* @param statusCode the http status code to use in the mock.
+* @return a mocked http response.
+*/
+protected HttpResponse createMockHttpResponse(int statusCode) {
+StatusLine statusLine = mock(StatusLine.class);
+when(statusLine.getStatusCode()).thenReturn(statusCode);
+HttpResponse httpResponse = mock(HttpResponse.class);
+when(httpResponse.getStatusLine()).thenReturn(statusLine);
+return httpResponse;
+}
+/**
+* Create a mock open url tracker with the given url.
+* @param url the url to use in the mock.
+* @return a mocked open url tracker.
+*/
+protected OpenURLTracker createMockTracker(String url) {
+OpenURLTracker tracker = mock(OpenURLTracker.class);
+when(tracker.getUrl()).thenReturn(url);
+return tracker;
+}
 /**
 * Test the processUrl method
-* @throws IOException
-* @throws SQLException
 */
 @Test
 public void testProcessUrl() throws IOException, SQLException {
 Context context = mock(Context.class);
-doReturn(HttpURLConnection.HTTP_OK).when(openUrlService)
-.getResponseCodeFromUrl(anyString());
+doReturn(createMockHttpResponse(HttpURLConnection.HTTP_OK)).when(httpClient).execute(any());
 openUrlService.processUrl(context, "test-url");
 verify(openUrlService, times(0)).logfailed(context, "test-url");
@@ -67,86 +111,90 @@ public class OpenUrlServiceImplTest {
 /**
 * Test the processUrl method when the url connection fails
-* @throws IOException
-* @throws SQLException
 */
 @Test
 public void testProcessUrlOnFail() throws IOException, SQLException {
 Context context = mock(Context.class);
-doReturn(HttpURLConnection.HTTP_INTERNAL_ERROR).when(openUrlService)
-.getResponseCodeFromUrl(anyString());
+doReturn(createMockHttpResponse(HttpURLConnection.HTTP_INTERNAL_ERROR)).when(httpClient).execute(any());
 doNothing().when(openUrlService).logfailed(any(Context.class), anyString());
 openUrlService.processUrl(context, "test-url");
 verify(openUrlService, times(1)).logfailed(context, "test-url");
 }
 /**
 * Test the ReprocessFailedQueue method
-* @throws SQLException
 */
 @Test
-public void testReprocessFailedQueue() throws SQLException {
+public void testReprocessFailedQueue() throws IOException, SQLException {
 Context context = mock(Context.class);
-List<OpenURLTracker> trackers = new ArrayList<>();
-OpenURLTracker tracker1 = mock(OpenURLTracker.class);
-OpenURLTracker tracker2 = mock(OpenURLTracker.class);
-OpenURLTracker tracker3 = mock(OpenURLTracker.class);
-trackers.add(tracker1);
-trackers.add(tracker2);
-trackers.add(tracker3);
+List<OpenURLTracker> trackers = List.of(
+createMockTracker("tacker1"),
+createMockTracker("tacker2"),
+createMockTracker("tacker3")
+);
 when(failedOpenURLTrackerService.findAll(any(Context.class))).thenReturn(trackers);
-doNothing().when(openUrlService).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class));
+// NOTE: first http request will return status code 500, next one 404, then 200
+doReturn(
+createMockHttpResponse(HttpURLConnection.HTTP_INTERNAL_ERROR),
+createMockHttpResponse(HttpURLConnection.HTTP_NOT_FOUND),
+createMockHttpResponse(HttpURLConnection.HTTP_OK)
+).when(httpClient).execute(any());
 openUrlService.reprocessFailedQueue(context);
 verify(openUrlService, times(3)).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class));
+// NOTE: http request for tracker 1 and 2 failed, so tracker 1 and 2 should be kept
+// http request for tracker 3 succeeded, so tracker 3 should be removed
+verify(failedOpenURLTrackerService, times(0)).remove(any(Context.class), eq(trackers.get(0)));
+verify(failedOpenURLTrackerService, times(0)).remove(any(Context.class), eq(trackers.get(1)));
+verify(failedOpenURLTrackerService, times(1)).remove(any(Context.class), eq(trackers.get(2)));
 }
 /**
 * Test the method that logs the failed urls in the db
-* @throws SQLException
 */
 @Test
 public void testLogfailed() throws SQLException {
 Context context = mock(Context.class);
 OpenURLTracker tracker1 = mock(OpenURLTracker.class);
-doCallRealMethod().when(tracker1).setUrl(anyString());
-when(tracker1.getUrl()).thenCallRealMethod();
 when(failedOpenURLTrackerService.create(any(Context.class))).thenReturn(tracker1);
 String failedUrl = "failed-url";
 openUrlService.logfailed(context, failedUrl);
-assertThat(tracker1.getUrl(), is(failedUrl));
+verify(tracker1).setUrl(failedUrl);
+// NOTE: verify that setUploadDate received a timestamp whose value is no less than 5 seconds from now
+ArgumentCaptor<Date> dateArgCaptor = ArgumentCaptor.forClass(Date.class);
+verify(tracker1).setUploadDate(dateArgCaptor.capture());
+assertThat(
+new BigDecimal(dateArgCaptor.getValue().getTime()),
+closeTo(new BigDecimal(new Date().getTime()), new BigDecimal(5000))
+);
 }
 /**
 * Tests whether the timeout gets set to 10 seconds when processing a url
-* @throws SQLException
 */
 @Test
-public void testTimeout() throws SQLException {
+public void testTimeout() throws IOException, SQLException {
 Context context = mock(Context.class);
-String URL = "http://bla.com";
-RequestConfig.Builder requestConfig = mock(RequestConfig.Builder.class);
-doReturn(requestConfig).when(openUrlService).getRequestConfigBuilder();
-doReturn(requestConfig).when(requestConfig).setConnectTimeout(10 * 1000);
-doReturn(RequestConfig.custom().build()).when(requestConfig).build();
-openUrlService.processUrl(context, URL);
-Mockito.verify(requestConfig).setConnectTimeout(10 * 1000);
+// 1. verify processUrl calls getHttpClient and getHttpClientRequestConfig once
+doReturn(createMockHttpResponse(HttpURLConnection.HTTP_OK)).when(httpClient).execute(any());
+openUrlService.processUrl(context, "test-url");
+verify(openUrlService).getHttpClient(any());
+verify(openUrlService).getHttpClientRequestConfig();
+// 2. verify that getHttpClientRequestConfig sets the timeout
+assertThat(openUrlService.getHttpClientRequestConfig().getConnectTimeout(), is(10 * 1000));
 }
 }

View File

@@ -8,6 +8,10 @@
 package org.dspace.xoai.app;
 import static com.lyncode.xoai.dataprovider.core.Granularity.Second;
+import static java.util.Objects.nonNull;
+import static org.apache.commons.lang.StringUtils.EMPTY;
+import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM;
+import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
 import static org.dspace.xoai.util.ItemUtils.retrieveMetadata;
 import java.io.ByteArrayOutputStream;
@@ -38,6 +42,8 @@ import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrQuery.ORDER;
 import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrInputDocument;
 import org.dspace.authorize.ResourcePolicy;
@@ -77,6 +83,7 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
 public class XOAI {
 private static Logger log = LogManager.getLogger(XOAI.class);
+// needed because the solr query only returns 10 rows by default
 private final Context context;
 private boolean optimize;
 private final boolean verbose;
@@ -94,8 +101,8 @@ public class XOAI {
 private final AuthorizeService authorizeService;
 private final ItemService itemService;
-private final static ConfigurationService configurationService = DSpaceServicesFactory
-.getInstance().getConfigurationService();
+private final static ConfigurationService configurationService = DSpaceServicesFactory.getInstance()
+.getConfigurationService();
 private List<XOAIExtensionItemCompilePlugin> extensionPlugins;
@@ -152,9 +159,8 @@ public class XOAI {
 System.out.println("Using full import.");
 result = this.indexAll();
 } else {
-SolrQuery solrParams = new SolrQuery("*:*")
-.addField("item.lastmodified")
-.addSort("item.lastmodified", ORDER.desc).setRows(1);
+SolrQuery solrParams = new SolrQuery("*:*").addField("item.lastmodified")
+.addSort("item.lastmodified", ORDER.desc).setRows(1);
 SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams);
 if (results.getNumFound() == 0) {
@@ -167,7 +173,6 @@ public class XOAI {
 }
 solrServerResolver.getServer().commit();
 if (optimize) {
 println("Optimizing Index");
 solrServerResolver.getServer().optimize();
@@ -183,12 +188,10 @@ public class XOAI {
 }
 private int index(Date last) throws DSpaceSolrIndexerException, IOException {
-System.out
-.println("Incremental import. Searching for documents modified after: "
-+ last.toString());
+System.out.println("Incremental import. Searching for documents modified after: " + last.toString());
 /*
-* Index all changed or new items or items whose visibility is viable to
-* change due to an embargo.
+* Index all changed or new items or items whose visibility is viable to change
+* due to an embargo.
 */
 try {
 Iterator<Item> discoverableChangedItems = itemService
@@ -204,31 +207,55 @@ public class XOAI {
 }
 /**
-* Get all items already in the index which are viable to change visibility
-* due to an embargo. Only consider those which haven't been modified
-* anyways since the last update, so they aren't updated twice in one import
-* run.
+* Get all items already in the index which are viable to change visibility due
+* to an embargo. Only consider those which haven't been modified anyways since
+* the last update, so they aren't updated twice in one import run.
 *
-* @param last
-* maximum date for an item to be considered for an update
-* @return Iterator over list of items which might have changed their
-* visibility since the last update.
+* @param last maximum date for an item to be considered for an update
+* @return Iterator over list of items which might have changed their visibility
+* since the last update.
 * @throws DSpaceSolrIndexerException
 */
 private Iterator<Item> getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException {
 try {
-SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id");
-SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params);
+SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id").setRows(100)
+.addSort("item.handle", SolrQuery.ORDER.asc);
+SolrClient solrClient = solrServerResolver.getServer();
 List<Item> items = new LinkedList<>();
-for (int i = 0; i < documents.getNumFound(); i++) {
-Item item = itemService.find(context,
-UUID.fromString((String) documents.get(i).getFieldValue("item.id")));
-if (item.getLastModified().before(last)) {
-items.add(item);
+boolean done = false;
+/*
+* Using solr cursors to paginate and prevent the query from returning 10
+* SolrDocument objects only.
+*/
+String cursorMark = CURSOR_MARK_START;
+String nextCursorMark = EMPTY;
+while (!done) {
+params.set(CURSOR_MARK_PARAM, cursorMark);
+QueryResponse response = solrClient.query(params);
+nextCursorMark = response.getNextCursorMark();
+for (SolrDocument document : response.getResults()) {
+Item item = itemService.find(context, UUID.fromString((String) document.getFieldValue("item.id")));
+if (nonNull(item)) {
+if (nonNull(item.getLastModified())) {
+if (item.getLastModified().before(last)) {
+items.add(item);
+}
+} else {
+log.warn("Skipping item with id " + item.getID());
+}
+}
 }
+if (cursorMark.equals(nextCursorMark)) {
+done = true;
+}
+cursorMark = nextCursorMark;
 }
 return items.iterator();
-} catch (SolrServerException | SQLException | DSpaceSolrException ex) {
+} catch (SolrServerException | SQLException ex) {
 throw new DSpaceSolrIndexerException(ex.getMessage(), ex);
 }
 }
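The cursor loop above follows the standard SolrJ cursorMark pattern: keep a stable sort, re-issue the query with the last cursor mark, and stop when the mark no longer advances. Stripped of the DSpace specifics, the same pattern looks like this; the core URL, field names and page size are illustrative assumptions, not taken from this commit:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.SolrDocument;
    import org.apache.solr.common.params.CursorMarkParams;

    public class CursorPagingSketch {
        public static void main(String[] args) throws Exception {
            // Illustrative Solr core URL.
            SolrClient solr = new HttpSolrClient.Builder("http://localhost:8983/solr/oai").build();
            SolrQuery query = new SolrQuery("item.willChangeStatus:true")
                    .addField("item.id")
                    .setRows(100)                                   // page size
                    .addSort("item.handle", SolrQuery.ORDER.asc);   // cursors need a deterministic sort
            String cursorMark = CursorMarkParams.CURSOR_MARK_START;
            boolean done = false;
            while (!done) {
                query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
                QueryResponse response = solr.query(query);
                for (SolrDocument doc : response.getResults()) {
                    System.out.println(doc.getFieldValue("item.id"));
                }
                String nextCursorMark = response.getNextCursorMark();
                done = cursorMark.equals(nextCursorMark);   // no progress means the result set is exhausted
                cursorMark = nextCursorMark;
            }
            solr.close();
        }
    }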
@@ -250,11 +277,10 @@ public class XOAI {
} }
/** /**
* Check if an item is already indexed. Using this, it is possible to check * Check if an item is already indexed. Using this, it is possible to check if
* if withdrawn or nondiscoverable items have to be indexed at all. * withdrawn or nondiscoverable items have to be indexed at all.
* *
* @param item * @param item Item that should be checked for its presence in the index.
* Item that should be checked for its presence in the index.
* @return has it been indexed? * @return has it been indexed?
*/ */
private boolean checkIfIndexed(Item item) throws IOException { private boolean checkIfIndexed(Item item) throws IOException {
@@ -266,11 +292,11 @@ public class XOAI {
return false; return false;
} }
} }
/**
/**
* Check if an item is flagged visible in the index. * Check if an item is flagged visible in the index.
* *
* @param item * @param item Item that should be checked for its presence in the index.
* Item that should be checked for its presence in the index.
* @return has it been indexed? * @return has it been indexed?
*/ */
private boolean checkIfVisibleInOAI(Item item) throws IOException { private boolean checkIfVisibleInOAI(Item item) throws IOException {
@@ -287,8 +313,7 @@ public class XOAI {
} }
} }
private int index(Iterator<Item> iterator) private int index(Iterator<Item> iterator) throws DSpaceSolrIndexerException {
throws DSpaceSolrIndexerException {
try { try {
int i = 0; int i = 0;
int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000); int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000);
@@ -302,7 +327,7 @@ public class XOAI {
} else { } else {
list.add(this.index(item)); list.add(this.index(item));
} }
//Uncache the item to keep memory consumption low // Uncache the item to keep memory consumption low
context.uncacheEntity(item); context.uncacheEntity(item);
} catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) { } catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) {
@@ -334,12 +359,11 @@ public class XOAI {
} }
/** /**
* Method to get the most recent date on which the item changed concerning * Method to get the most recent date on which the item changed concerning the
* the OAI deleted status (policy start and end dates for all anonymous READ * OAI deleted status (policy start and end dates for all anonymous READ
* policies and the standard last modification date) * policies and the standard last modification date)
* *
* @param item * @param item Item
* Item
* @return date * @return date
* @throws SQLException * @throws SQLException
*/ */
@@ -382,17 +406,16 @@ public class XOAI {
boolean isIndexed = this.checkIfIndexed(item); boolean isIndexed = this.checkIfIndexed(item);
/* /*
* If the item is not under embargo, it should be visible. If it is, * If the item is not under embargo, it should be visible. If it is, make it
* make it invisible if this is the first time it is indexed. For * invisible if this is the first time it is indexed. For subsequent index runs,
* subsequent index runs, keep the current status, so that if the item * keep the current status, so that if the item is embargoed again, it is
* is embargoed again, it is flagged as deleted instead and does not * flagged as deleted instead and does not just disappear, or if it is still
* just disappear, or if it is still under embargo, it won't become * under embargo, it won't become visible and be known to harvesters as deleted
* visible and be known to harvesters as deleted before it gets * before it gets disseminated for the first time. The item has to be indexed
* disseminated for the first time. The item has to be indexed directly * directly after publication even if it is still embargoed, because its
* after publication even if it is still embargoed, because its * lastModified date will not change when the embargo end date (or start date)
* lastModified date will not change when the embargo end date (or start * is reached. To circumvent this, an item which will change its status in the
* date) is reached. To circumvent this, an item which will change its * future will be marked as such.
* status in the future will be marked as such.
*/ */
boolean isPublic = isEmbargoed ? (isIndexed ? isCurrentlyVisible : false) : true; boolean isPublic = isEmbargoed ? (isIndexed ? isCurrentlyVisible : false) : true;
@@ -404,33 +427,31 @@ public class XOAI {
doc.addField("item.willChangeStatus", willChangeStatus(item)); doc.addField("item.willChangeStatus", willChangeStatus(item));
/* /*
* Mark an item as deleted not only if it is withdrawn, but also if it * Mark an item as deleted not only if it is withdrawn, but also if it is made
* is made private, because items should not simply disappear from OAI * private, because items should not simply disappear from OAI with a transient
* with a transient deletion policy. Do not set the flag for still * deletion policy. Do not set the flag for still invisible embargoed items,
* invisible embargoed items, because this will override the item.public * because this will override the item.public flag.
* flag.
*/ */
doc.addField("item.deleted", doc.addField("item.deleted",
(item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? isPublic : false))); (item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? isPublic : false)));
/* /*
* An item that is embargoed will potentially not be harvested by * An item that is embargoed will potentially not be harvested by incremental
* incremental harvesters if the from and until params do not encompass * harvesters if the from and until params do not encompass both the standard
* both the standard lastModified date and the anonymous-READ resource * lastModified date and the anonymous-READ resource policy start date. The same
* policy start date. The same is true for the end date, where * is true for the end date, where harvesters might not get a tombstone record.
* harvesters might not get a tombstone record. Therefore, consider all * Therefore, consider all relevant policy dates and the standard lastModified
* relevant policy dates and the standard lastModified date and take the * date and take the most recent of those which have already passed.
* most recent of those which have already passed.
*/ */
doc.addField("item.lastmodified", SolrUtils.getDateFormatter() doc.addField("item.lastmodified",
.format(this.getMostRecentModificationDate(item))); SolrUtils.getDateFormatter().format(this.getMostRecentModificationDate(item)));
if (item.getSubmitter() != null) { if (item.getSubmitter() != null) {
doc.addField("item.submitter", item.getSubmitter().getEmail()); doc.addField("item.submitter", item.getSubmitter().getEmail());
} }
        for (Collection col : item.getCollections()) {
doc.addField("item.collections", "col_" + col.getHandle().replace("/", "_")); doc.addField("item.collections", "col_" + col.getHandle().replace("/", "_"));
} }
for (Community com : collectionsService.flatParentCommunities(context, item)) { for (Community com : collectionsService.flatParentCommunities(context, item)) {
@@ -457,8 +478,7 @@ public class XOAI {
// Message output before processing - for debugging purposes // Message output before processing - for debugging purposes
if (verbose) { if (verbose) {
println(String.format("Item %s with handle %s is about to be indexed", println(String.format("Item %s with handle %s is about to be indexed", item.getID().toString(), handle));
item.getID().toString(), handle));
} }
ByteArrayOutputStream out = new ByteArrayOutputStream(); ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -476,8 +496,7 @@ public class XOAI {
doc.addField("item.compile", out.toString()); doc.addField("item.compile", out.toString());
if (verbose) { if (verbose) {
println(String.format("Item %s with handle %s indexed", println(String.format("Item %s with handle %s indexed", item.getID().toString(), handle));
item.getID().toString(), handle));
} }
return doc; return doc;
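Taken together, this method builds one Solr document per item. A compressed, illustrative sketch of the fields it fills in; the field names are the ones mentioned in this diff, the values are made up:

    import org.apache.solr.common.SolrInputDocument;

    // Illustrative only: the per-item fields assembled above, with example values.
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("item.public", true);                    // result of the embargo/visibility logic
    doc.addField("item.deleted", false);                  // withdrawn, private, or formerly exposed embargoed items
    doc.addField("item.willChangeStatus", false);         // a future policy date will flip the status
    doc.addField("item.lastmodified", "2022-08-02T00:00:00Z");
    doc.addField("item.submitter", "submitter@example.org");
    doc.addField("item.collections", "col_123456789_2");  // owning collection handle, '/' replaced by '_'
    doc.addField("item.compile", "<metadata/>");          // pre-compiled XOAI metadata document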
@@ -510,12 +529,10 @@ public class XOAI {
return pub; return pub;
} }
private static boolean getKnownExplanation(Throwable t) { private static boolean getKnownExplanation(Throwable t) {
if (t instanceof ConnectException) { if (t instanceof ConnectException) {
            System.err.println(
                    "Solr server (" + configurationService.getProperty("oai.solr.url", "") + ") is down, turn it on.");
return true; return true;
} }
@@ -544,7 +561,7 @@ public class XOAI {
} }
private static void cleanCache(XOAIItemCacheService xoaiItemCacheService, XOAICacheService xoaiCacheService) private static void cleanCache(XOAIItemCacheService xoaiItemCacheService, XOAICacheService xoaiCacheService)
throws IOException { throws IOException {
System.out.println("Purging cached OAI responses."); System.out.println("Purging cached OAI responses.");
xoaiItemCacheService.deleteAll(); xoaiItemCacheService.deleteAll();
xoaiCacheService.deleteAll(); xoaiCacheService.deleteAll();
@@ -557,10 +574,8 @@ public class XOAI {
public static void main(String[] argv) throws IOException, ConfigurationException { public static void main(String[] argv) throws IOException, ConfigurationException {
        AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext(
                new Class[] { BasicConfiguration.class });
XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class); XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class);
XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class); XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class);
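main() boots a small Spring context by hand rather than running inside a container. Reduced to its essentials, the pattern looks like this; SomeIndexer is a hypothetical stand-in, the remaining names appear in the diff:

    import org.springframework.context.annotation.AnnotationConfigApplicationContext;

    // Sketch of the bootstrap pattern used above: build a context from an annotated configuration
    // class, look beans up by type, and autowire objects that were constructed outside Spring.
    AnnotationConfigApplicationContext ctx =
            new AnnotationConfigApplicationContext(new Class[] { BasicConfiguration.class });
    XOAICacheService cacheService = ctx.getBean(XOAICacheService.class);
    SomeIndexer indexer = new SomeIndexer();                    // hypothetical object created by hand
    ctx.getAutowireCapableBeanFactory().autowireBean(indexer);  // fills in its @Autowired fields
    ctx.close();                                                // release the context when done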
@@ -571,21 +586,19 @@ public class XOAI {
CommandLineParser parser = new DefaultParser(); CommandLineParser parser = new DefaultParser();
Options options = new Options(); Options options = new Options();
options.addOption("c", "clear", false, "Clear index before indexing"); options.addOption("c", "clear", false, "Clear index before indexing");
options.addOption("o", "optimize", false, options.addOption("o", "optimize", false, "Optimize index at the end");
"Optimize index at the end");
options.addOption("v", "verbose", false, "Verbose output"); options.addOption("v", "verbose", false, "Verbose output");
options.addOption("h", "help", false, "Shows some help"); options.addOption("h", "help", false, "Shows some help");
options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE"); options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE");
CommandLine line = parser.parse(options, argv); CommandLine line = parser.parse(options, argv);
        String[] validSolrCommands = { COMMAND_IMPORT, COMMAND_CLEAN_CACHE };
        String[] validDatabaseCommands = { COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS,
                COMMAND_ERASE_COMPILED_ITEMS };
boolean solr = true; // Assuming solr by default boolean solr = true; // Assuming solr by default
solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); solr = !("database").equals(configurationService.getProperty("oai.storage", "solr"));
boolean run = false; boolean run = false;
if (line.getArgs().length > 0) { if (line.getArgs().length > 0) {
if (solr) { if (solr) {
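The rest of main() is cut off by this hunk, but the run flag and the two command arrays above suggest a containment check along these lines; this is an illustrative guess, not the actual continuation:

    // Sketch: the first positional argument picks the command; which commands are valid
    // depends on whether Solr or database storage is configured.
    String command = line.getArgs()[0];
    String[] valid = solr ? validSolrCommands : validDatabaseCommands;
    boolean run = java.util.Arrays.asList(valid).contains(command);
    if (!run) {
        usage();   // unknown command for this backend
    }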
@@ -607,10 +620,7 @@ public class XOAI {
if (COMMAND_IMPORT.equals(command)) { if (COMMAND_IMPORT.equals(command)) {
ctx = new Context(Context.Mode.READ_ONLY); ctx = new Context(Context.Mode.READ_ONLY);
                    XOAI indexer = new XOAI(ctx, line.hasOption('o'), line.hasOption('c'), line.hasOption('v'));
applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer);
@@ -635,8 +645,7 @@ public class XOAI {
} }
System.out.println("OAI 2.0 manager action ended. It took " System.out.println("OAI 2.0 manager action ended. It took "
+ ((System.currentTimeMillis() - start) / 1000) + ((System.currentTimeMillis() - start) / 1000) + " seconds.");
+ " seconds.");
} else { } else {
usage(); usage();
} }
@@ -688,7 +697,7 @@ public class XOAI {
private static void usage() { private static void usage() {
boolean solr = true; // Assuming solr by default boolean solr = true; // Assuming solr by default
solr = !("database").equals(configurationService.getProperty("oai.storage","solr")); solr = !("database").equals(configurationService.getProperty("oai.storage", "solr"));
if (solr) { if (solr) {
System.out.println("OAI Manager Script"); System.out.println("OAI Manager Script");

View File

@@ -34,7 +34,8 @@ public class DiscoverFacetsConverter {
private static final Logger log = LogManager.getLogger(DiscoverFacetsConverter.class); private static final Logger log = LogManager.getLogger(DiscoverFacetsConverter.class);
    @Autowired
    private DiscoverFacetValueConverter facetValueConverter;
@Autowired @Autowired
private SearchService searchService; private SearchService searchService;
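The change above replaces direct construction of the value converter (previously a final field initialized with new DiscoverFacetValueConverter()) with Spring field injection. A self-contained sketch of that pattern with hypothetical bean names, not DSpace classes:

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.stereotype.Component;

    // Field injection replaces direct construction: Spring now owns the collaborator's
    // lifecycle, so tests or alternative configurations can substitute another implementation.
    @Component
    class ValueConverter { }

    @Component
    class FacetsConverter {
        @Autowired
        private ValueConverter valueConverter;   // previously: = new ValueConverter()
    }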

View File

@@ -158,6 +158,7 @@
<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED</arg> <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED</arg>
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED</arg> <arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED</arg>
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED</arg> <arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED</arg>
<arg>-Xpkginfo:always</arg>
</compilerArgs> </compilerArgs>
<annotationProcessorPaths> <annotationProcessorPaths>
<path> <path>