Merge branch 'main' into DS-4300 to resolve merge conflicts.

Mark H. Wood
2022-05-05 09:54:38 -04:00
202 changed files with 3176 additions and 2154 deletions

View File

@@ -373,7 +373,7 @@
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Final</version>
<version>1.0.2.Final</version>
</dependency>
<dependency>
@@ -394,7 +394,7 @@
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
<!-- Newer version of Bouncycastle brought in via solr-cell -->
<!-- Newer version of Bouncycastle brought in via Tika -->
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
@@ -530,18 +530,6 @@
<groupId>org.apache.pdfbox</groupId>
<artifactId>fontbox</artifactId>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-scratchpad</artifactId>
</dependency>
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
</dependency>
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</dependency>
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
@@ -606,27 +594,13 @@
<artifactId>httpmime</artifactId>
</dependency>
<!-- SolrJ is used to communicate with Solr throughout the dspace-api -->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version>
<exclusions>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
<!-- Solr Core is only needed for Integration Tests (to run a MockSolrServer) -->
<!-- The following Solr / Lucene dependencies also support integration tests -->
<dependency>
<groupId>org.apache.solr</groupId>
@@ -654,39 +628,10 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<exclusions>
<!-- Newer version brought in by opencsv -->
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<!-- Used for full-text indexing with Solr -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId>
@@ -702,9 +647,15 @@
<artifactId>lucene-analyzers-stempel</artifactId>
<scope>test</scope>
</dependency>
<!-- Tika is used to extract full text from documents in order to index in Solr -->
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers-standard-package</artifactId>
</dependency>
<dependency>
@@ -728,13 +679,6 @@
<version>1.1.1</version>
</dependency>
<!-- Gson: Java to Json conversion -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@@ -762,7 +706,7 @@
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
<version>6.5.7</version>
<version>8.4.4</version>
</dependency>
<!-- Google Analytics -->
@@ -913,13 +857,6 @@
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.github.stefanbirkner</groupId>
<artifactId>system-rules</artifactId>
<version>1.19.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mock-server</groupId>
<artifactId>mockserver-junit-rule</artifactId>

View File

@@ -11,13 +11,16 @@ import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
@@ -90,7 +93,7 @@ public class MetadataImporter {
public static void main(String[] args)
throws ParseException, SQLException, IOException, TransformerException,
ParserConfigurationException, AuthorizeException, SAXException,
NonUniqueMetadataException, RegistryImportException {
NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
// create an options object and populate it
CommandLineParser parser = new DefaultParser();
@@ -124,8 +127,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
public static void loadRegistry(String file, boolean forceUpdate)
throws SQLException, IOException, TransformerException, ParserConfigurationException,
AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException,
SAXException, NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
Context context = null;
try {
@@ -137,7 +140,9 @@ public class MetadataImporter {
Document document = RegistryImporter.loadXML(file);
// Get the nodes corresponding to types
NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList schemaNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < schemaNodes.getLength(); i++) {
@@ -146,7 +151,8 @@ public class MetadataImporter {
}
// Get the nodes corresponding to types
NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type");
NodeList typeNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-type")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -178,8 +184,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
private static void loadSchema(Context context, Node node, boolean updateExisting)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
XPathExpressionException {
// Get the values
String name = RegistryImporter.getElementData(node, "name");
String namespace = RegistryImporter.getElementData(node, "namespace");
@@ -236,8 +242,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
private static void loadType(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
XPathExpressionException {
// Get the values
String schema = RegistryImporter.getElementData(node, "schema");
String element = RegistryImporter.getElementData(node, "element");
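The same migration pattern recurs throughout this commit: Xalan's XPathAPI helpers are replaced by the JDK's javax.xml.xpath API, and method signatures trade TransformerException for XPathExpressionException. A minimal, self-contained sketch of the before/after usage, assuming only the JDK (the sample XML is illustrative, not from the commit):

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class XPathMigrationSketch {
    public static void main(String[] args) throws Exception {
        Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new InputSource(new StringReader(
                        "<dspace-dc-types><dc-schema/><dc-schema/></dspace-dc-types>")));

        // Old style (requires Xalan on the classpath, throws TransformerException):
        // NodeList nodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");

        // New style (JDK built-in, throws XPathExpressionException):
        XPath xPath = XPathFactory.newInstance().newXPath();
        NodeList nodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema")
                .evaluate(document, XPathConstants.NODESET);
        System.out.println(nodes.getLength()); // prints 2
    }
}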

View File

@@ -13,8 +13,11 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -72,9 +75,10 @@ public class RegistryImporter {
* @throws TransformerException if error
*/
public static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws XPathExpressionException {
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
if (childNode == null) {
// No child node, so no values
@@ -115,9 +119,10 @@ public class RegistryImporter {
* @throws TransformerException if error
*/
public static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws XPathExpressionException {
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
String[] data = new String[childNodes.getLength()];

View File

@@ -16,9 +16,12 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
@@ -122,12 +125,13 @@ public class RegistryLoader {
*/
public static void loadBitstreamFormats(Context context, String filename)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
SAXException, TransformerException, AuthorizeException, XPathExpressionException {
Document document = loadXML(filename);
// Get the nodes corresponding to formats
NodeList typeNodes = XPathAPI.selectNodeList(document,
"dspace-bitstream-types/bitstream-type");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList typeNodes = (NodeList) xPath.compile("dspace-bitstream-types/bitstream-type")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -151,8 +155,7 @@ public class RegistryLoader {
* @throws AuthorizeException if authorization error
*/
private static void loadFormat(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException {
throws SQLException, AuthorizeException, XPathExpressionException {
// Get the values
String mimeType = getElementData(node, "mimetype");
String shortDesc = getElementData(node, "short_description");
@@ -231,9 +234,10 @@ public class RegistryLoader {
* @throws TransformerException if transformer error
*/
private static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws XPathExpressionException {
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
if (childNode == null) {
// No child node, so no values
@@ -274,9 +278,10 @@ public class RegistryLoader {
* @throws TransformerException if transformer error
*/
private static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws XPathExpressionException {
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
String[] data = new String[childNodes.getLength()];

View File

@@ -30,6 +30,10 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -39,7 +43,6 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -141,10 +144,11 @@ public class StructBuilder {
* @throws SQLException passed through.
* @throws FileNotFoundException if input or output could not be opened.
* @throws TransformerException if the input document is invalid.
* @throws XPathExpressionException passed through.
*/
public static void main(String[] argv)
throws ParserConfigurationException, SQLException,
FileNotFoundException, IOException, TransformerException {
IOException, TransformerException, XPathExpressionException {
// Define command line options.
Options options = new Options();
@@ -251,7 +255,7 @@ public class StructBuilder {
static void importStructure(Context context, InputStream input,
OutputStream output, boolean keepHandles)
throws IOException, ParserConfigurationException, SQLException,
TransformerException {
TransformerException, XPathExpressionException {
// load the XML
Document document = null;
@@ -269,13 +273,15 @@ public class StructBuilder {
// is properly structured.
try {
validate(document);
} catch (TransformerException ex) {
} catch (XPathExpressionException ex) {
System.err.format("The input document is invalid: %s%n", ex.getMessage());
System.exit(1);
}
// Check for 'identifier' attributes -- possibly output by this class.
NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]")
.evaluate(document, XPathConstants.NODESET);
if (identifierNodes.getLength() > 0) {
if (!keepHandles) {
System.err.println("The input document has 'identifier' attributes, which will be ignored.");
@@ -310,7 +316,8 @@ public class StructBuilder {
Element[] elements = new Element[]{};
try {
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
NodeList first = (NodeList) xPath.compile("/import_structure/community")
.evaluate(document, XPathConstants.NODESET);
// run the import starting with the top level communities
elements = handleCommunities(context, first, null, keepHandles);
@@ -479,14 +486,16 @@ public class StructBuilder {
* @throws TransformerException if transformer error
*/
private static void validate(org.w3c.dom.Document document)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
err.append("The following errors were encountered parsing the source XML.\n");
err.append("No changes have been made to the DSpace instance.\n\n");
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList first = (NodeList) xPath.compile("/import_structure/community")
.evaluate(document, XPathConstants.NODESET);
if (first.getLength() == 0) {
err.append("-There are no top level communities in the source document.");
System.out.println(err.toString());
@@ -516,14 +525,15 @@ public class StructBuilder {
* no errors.
*/
private static String validateCommunities(NodeList communities, int level)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) {
Node n = communities.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
@@ -533,7 +543,7 @@ public class StructBuilder {
}
// validate sub communities
NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(n, XPathConstants.NODESET);
String comErrs = validateCommunities(subCommunities, level + 1);
if (comErrs != null) {
err.append(comErrs);
@@ -541,7 +551,7 @@ public class StructBuilder {
}
// validate collections
NodeList collections = XPathAPI.selectNodeList(n, "collection");
NodeList collections = (NodeList) xPath.compile("collection").evaluate(n, XPathConstants.NODESET);
String colErrs = validateCollections(collections, level + 1);
if (colErrs != null) {
err.append(colErrs);
@@ -565,14 +575,15 @@ public class StructBuilder {
* @return the errors to be generated by the calling method, or null if none
*/
private static String validateCollections(NodeList collections, int level)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) {
Node n = collections.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
@@ -638,8 +649,10 @@ public class StructBuilder {
*/
private static Element[] handleCommunities(Context context, NodeList communities,
Community parent, boolean keepHandles)
throws TransformerException, SQLException, AuthorizeException {
throws TransformerException, SQLException, AuthorizeException,
XPathExpressionException {
Element[] elements = new Element[communities.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) {
Node tn = communities.item(i);
@@ -661,7 +674,7 @@ public class StructBuilder {
// now update the metadata
for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
communityService.setMetadataSingleValue(context, community,
entry.getValue(), null, getStringValue(nl.item(0)));
@@ -728,12 +741,14 @@ public class StructBuilder {
}
// handle sub communities
NodeList subCommunities = XPathAPI.selectNodeList(tn, "community");
NodeList subCommunities = (NodeList) xPath.compile("community")
.evaluate(tn, XPathConstants.NODESET);
Element[] subCommunityElements = handleCommunities(context,
subCommunities, community, keepHandles);
// handle collections
NodeList collections = XPathAPI.selectNodeList(tn, "collection");
NodeList collections = (NodeList) xPath.compile("collection")
.evaluate(tn, XPathConstants.NODESET);
Element[] collectionElements = handleCollections(context,
collections, community, keepHandles);
@@ -762,8 +777,9 @@ public class StructBuilder {
*/
private static Element[] handleCollections(Context context,
NodeList collections, Community parent, boolean keepHandles)
throws TransformerException, SQLException, AuthorizeException {
throws SQLException, AuthorizeException, XPathExpressionException {
Element[] elements = new Element[collections.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) {
Node tn = collections.item(i);
@@ -785,7 +801,7 @@ public class StructBuilder {
// import the rest of the metadata
for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
collectionService.setMetadataSingleValue(context, collection,
entry.getValue(), null, getStringValue(nl.item(0)));

View File

@@ -41,10 +41,8 @@ public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> ext
Options options = new Options();
options.addOption("m", "metadata", true, "metadata field name");
options.getOption("m").setType(String.class);
options.addOption("l", "list", false, "lists the metadata fields that can be deleted");
options.getOption("l").setType(boolean.class);
super.options = options;
}
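The script-configuration diffs in this commit all drop setType(boolean.class) on no-argument options, presumably because an Option created with hasArg = false is already a plain on/off flag and needs no type hint. A small sketch of the remaining pattern with Apache Commons CLI (option names taken from the diff above):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;

public class FlagOptionSketch {
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        // third argument 'false' = the option takes no value; no setType call needed
        options.addOption("l", "list", false, "lists the metadata fields that can be deleted");
        options.addOption("m", "metadata", true, "metadata field name"); // takes a value

        CommandLine line = new DefaultParser().parse(options, new String[] {"-l"});
        System.out.println(line.hasOption("l")); // prints true
    }
}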

View File

@@ -54,12 +54,9 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
Options options = new Options();
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.getOption("i").setType(String.class);
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.getOption("a").setType(boolean.class);
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options;

View File

@@ -19,7 +19,6 @@ public class MetadataImportCliScriptConfiguration extends MetadataImportScriptCo
public Options getOptions() {
Options options = super.getOptions();
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true);
super.options = options;
return options;

View File

@@ -59,20 +59,14 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
options.getOption("f").setRequired(true);
options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.getOption("s").setType(boolean.class);
options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow");
options.getOption("w").setType(boolean.class);
options.addOption("n", "notify", false,
"notify - when adding new items using a workflow, send notification emails");
options.getOption("n").setType(boolean.class);
options.addOption("v", "validate-only", false,
"validate - just validate the csv, don't run the import");
options.getOption("v").setType(boolean.class);
options.addOption("t", "template", false,
"template - when adding new items, use the collection template (if it exists)");
options.getOption("t").setType(boolean.class);
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options;
}

View File

@@ -43,22 +43,14 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
public Options getOptions() {
Options options = new Options();
options.addOption("p", "purge", false, "delete all items in the collection");
options.getOption("p").setType(boolean.class);
options.addOption("r", "run", false, "run the standard harvest procedure");
options.getOption("r").setType(boolean.class);
options.addOption("g", "ping", false, "test the OAI server and set");
options.getOption("g").setType(boolean.class);
options.addOption("s", "setup", false, "Set the collection up for harvesting");
options.getOption("s").setType(boolean.class);
options.addOption("S", "start", false, "start the harvest loop");
options.getOption("S").setType(boolean.class);
options.addOption("R", "reset", false, "reset harvest status on all collections");
options.getOption("R").setType(boolean.class);
options.addOption("P", "purgeCollections", false, "purge all harvestable collections");
options.getOption("P").setType(boolean.class);
options.addOption("o", "reimport", false, "reimport all items in the collection, " +
"this is equivalent to -p -r, purging all items in a collection and reimporting them");
options.getOption("o").setType(boolean.class);
options.addOption("c", "collection", true,
"harvesting collection (handle or id)");
options.addOption("t", "type", true,
@@ -72,7 +64,6 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
"crosswalk in dspace.cfg");
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
return options;
}

View File

@@ -51,6 +51,10 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.collections4.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
@@ -59,7 +63,6 @@ import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.app.util.RelationshipUtils;
@@ -863,7 +866,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// Load all metadata schemas into the item.
protected void loadMetadata(Context c, Item myitem, String path)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
SAXException, TransformerException, AuthorizeException, XPathExpressionException {
// Load the dublin core metadata
loadDublinCore(c, myitem, path + "dublin_core.xml");
@@ -877,14 +880,15 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
protected void loadDublinCore(Context c, Item myitem, String filename)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
SAXException, TransformerException, AuthorizeException, XPathExpressionException {
Document document = loadXML(filename);
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import
// file
String schema;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET);
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem(
"schema");
if (schemaAttr == null) {
@@ -894,8 +898,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document,
"/dublin_core/dcvalue");
NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET);
if (!isQuiet) {
System.out.println("\tLoading dublin core from " + filename);

View File

@@ -27,10 +27,12 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
@@ -170,24 +172,21 @@ public class MetadataUtilities {
* @param docBuilder DocumentBuilder
* @param is - InputStream of dublin_core.xml
* @return list of DtoMetadata representing the metadata fields relating to an Item
* @throws SQLException if database error
* @throws IOException if IO error
* @throws ParserConfigurationException if parser config error
* @throws SAXException if XML error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
*/
public static List<DtoMetadata> loadDublinCore(DocumentBuilder docBuilder, InputStream is)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
throws IOException, XPathExpressionException, SAXException {
Document document = docBuilder.parse(is);
List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import file
String schema = null;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
String schema;
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET);
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchemaEnum.DC.getName();
@@ -196,7 +195,7 @@ public class MetadataUtilities {
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");
NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET);
for (int i = 0; i < dcNodes.getLength(); i++) {
Node n = dcNodes.item(i);

View File

@@ -1,99 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFExcelExtractor;
import org.dspace.content.Item;
/*
* ExcelFilter
*
* Entries you must add to dspace.cfg:
*
* filter.plugins = blah, \
* Excel Text Extractor
*
* plugin.named.org.dspace.app.mediafilter.FormatFilter = \
* blah = blah, \
* org.dspace.app.mediafilter.ExcelFilter = Excel Text Extractor
*
* #Configure each filter's input Formats
* filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML
*
*/
public class ExcelFilter extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
/**
* @return String bundle name
*/
public String getBundleName() {
return "TEXT";
}
/**
* @return String bitstream format
*/
public String getFormatString() {
return "Text";
}
/**
* @return String description
*/
public String getDescription() {
return "Extracted text";
}
/**
* @param item item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
throws Exception {
String extractedText = null;
try {
POITextExtractor theExtractor = ExtractorFactory.createExtractor(source);
if (theExtractor instanceof ExcelExtractor) {
// for xls file
extractedText = (theExtractor).getText();
} else if (theExtractor instanceof XSSFExcelExtractor) {
// for xlsx file
extractedText = (theExtractor).getText();
}
} catch (Exception e) {
log.error("Error filtering bitstream: " + e.getMessage(), e);
throw e;
}
if (extractedText != null) {
// generate an input stream with the extracted text
return IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8);
}
return null;
}
}

View File

@@ -1,82 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import javax.swing.text.Document;
import javax.swing.text.html.HTMLEditorKit;
import org.dspace.content.Item;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist
*
*/
public class HTMLFilter extends MediaFilter {
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "TEXT";
}
/**
* @return String bitstream format
*/
@Override
public String getFormatString() {
return "Text";
}
/**
* @return String description
*/
@Override
public String getDescription() {
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
// try and read the document - set to ignore character set directive,
// assuming that the input stream is already set properly (I hope)
HTMLEditorKit kit = new HTMLEditorKit();
Document doc = kit.createDefaultDocument();
doc.putProperty("IgnoreCharsetDirective", Boolean.TRUE);
kit.read(source, doc, 0);
String extractedText = doc.getText(0, doc.getLength());
// generate an input stream with the extracted text
byte[] textBytes = extractedText.getBytes(StandardCharsets.UTF_8);
ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);
return bais;
}
}

View File

@@ -50,15 +50,11 @@ public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends
public Options getOptions() {
Options options = new Options();
options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT");
options.getOption("v").setType(boolean.class);
options.addOption("q", "quiet", false, "do not print anything except in the event of errors.");
options.getOption("q").setType(boolean.class);
options.addOption("f", "force", false, "force all bitstreams to be processed");
options.getOption("f").setType(boolean.class);
options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true, "process no more than maximum items");
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
Option pluginOption = Option.builder("p")
.longOpt("plugins")

View File

@@ -1,137 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.text.PDFTextStripper;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist
*
*/
public class PDFFilter extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "TEXT";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "Text";
}
/**
* @return String description
*/
@Override
public String getDescription() {
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
try {
boolean useTemporaryFile = configurationService.getBooleanProperty("pdffilter.largepdfs", false);
// get input stream from bitstream
// pass to filter, get string back
PDFTextStripper pts = new PDFTextStripper();
pts.setSortByPosition(true);
PDDocument pdfDoc = null;
Writer writer = null;
File tempTextFile = null;
ByteArrayOutputStream byteStream = null;
if (useTemporaryFile) {
tempTextFile = File.createTempFile("dspacepdfextract" + source.hashCode(), ".txt");
tempTextFile.deleteOnExit();
writer = new OutputStreamWriter(new FileOutputStream(tempTextFile));
} else {
byteStream = new ByteArrayOutputStream();
writer = new OutputStreamWriter(byteStream);
}
try {
pdfDoc = PDDocument.load(source);
pts.writeText(pdfDoc, writer);
} catch (InvalidPasswordException ex) {
log.error("PDF is encrypted. Cannot extract text (item: {})",
() -> currentItem.getHandle());
return null;
} finally {
try {
if (pdfDoc != null) {
pdfDoc.close();
}
} catch (Exception e) {
log.error("Error closing PDF file: " + e.getMessage(), e);
}
try {
writer.close();
} catch (Exception e) {
log.error("Error closing temporary extract file: " + e.getMessage(), e);
}
}
if (useTemporaryFile) {
return new FileInputStream(tempTextFile);
} else {
byte[] bytes = byteStream.toByteArray();
return new ByteArrayInputStream(bytes);
}
} catch (OutOfMemoryError oome) {
log.error("Error parsing PDF document " + oome.getMessage(), oome);
if (!configurationService.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
throw oome;
}
}
return null;
}
}

View File

@@ -1,72 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.xmlbeans.XmlException;
import org.dspace.content.Item;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extract flat text from Microsoft Word documents (.doc, .docx).
*/
public class PoiWordFilter
extends MediaFilter {
private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class);
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
@Override
public String getBundleName() {
return "TEXT";
}
@Override
public String getFormatString() {
return "Text";
}
@Override
public String getDescription() {
return "Extracted text";
}
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
String text;
try {
// get input stream from bitstream, pass to filter, get string back
POITextExtractor extractor = ExtractorFactory.createExtractor(source);
text = extractor.getText();
} catch (IOException | OpenXML4JException | XmlException e) {
System.err.format("Invalid File Format: %s%n", e.getMessage());
LOG.error("Unable to parse the bitstream: ", e);
throw e;
}
// if verbose flag is set, print out extracted text to STDOUT
if (verbose) {
System.out.println(text);
}
// return the extracted text as a stream.
return new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
}
}

View File

@@ -1,113 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor;
import org.dspace.content.Item;
/*
* TODO: Allow user to configure extraction of only text or only notes
*
*/
public class PowerPointFilter extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class);
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "TEXT";
}
/**
* @return String bitstream format
*
* TODO: Check that this is correct
*/
@Override
public String getFormatString() {
return "Text";
}
/**
* @return String description
*/
@Override
public String getDescription() {
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
try {
String extractedText = null;
new ExtractorFactory();
POITextExtractor pptExtractor = ExtractorFactory
.createExtractor(source);
// PowerPoint XML files and legacy format PowerPoint files
// require different classes and APIs for text extraction
// If this is a PowerPoint XML file, extract accordingly
if (pptExtractor instanceof XSLFPowerPointExtractor) {
// The true method arguments indicate that text from
// the slides and the notes is desired
extractedText = ((XSLFPowerPointExtractor) pptExtractor)
.getText(true, true);
} else if (pptExtractor instanceof PowerPointExtractor) { // Legacy PowerPoint files
extractedText = ((PowerPointExtractor) pptExtractor).getText()
+ " " + ((PowerPointExtractor) pptExtractor).getNotes();
}
if (extractedText != null) {
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
System.out.println(extractedText);
}
// generate an input stream with the extracted text
byte[] textBytes = extractedText.getBytes();
ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);
return bais;
}
} catch (Exception e) {
log.error("Error filtering bitstream: " + e.getMessage(), e);
throw e;
}
return null;
}
}

View File

@@ -0,0 +1,183 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.tika.Tika;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.sax.BodyContentHandler;
import org.apache.tika.sax.ContentHandlerDecorator;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.xml.sax.SAXException;
/**
* Text Extraction media filter which uses Apache Tika to extract text from a large number of file formats (including
* all Microsoft formats, PDF, HTML, Text, etc). For a more complete list of file formats supported by Tika see the
* Tika documentation: https://tika.apache.org/2.3.0/formats.html
*/
public class TikaTextExtractionFilter
extends MediaFilter {
private final static Logger log = LogManager.getLogger();
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
@Override
public String getBundleName() {
return "TEXT";
}
@Override
public String getFormatString() {
return "Text";
}
@Override
public String getDescription() {
return "Extracted text";
}
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
boolean useTemporaryFile = configurationService.getBooleanProperty("textextractor.use-temp-file", false);
if (useTemporaryFile) {
// Extract text out of source file using a temp file, returning results as InputStream
return extractUsingTempFile(source, verbose);
}
// Not using temporary file. We'll use Tika's default in-memory parsing.
// Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting.
String extractedText;
int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000);
try {
// Use Tika to extract text from input. Tika will automatically detect the file type.
Tika tika = new Tika();
tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract
extractedText = tika.parseToString(source);
} catch (IOException e) {
System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString());
e.printStackTrace();
log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e);
throw e;
} catch (OutOfMemoryError oe) {
System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " +
"You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString());
oe.printStackTrace();
log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " +
"You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe);
throw oe;
}
if (StringUtils.isNotEmpty(extractedText)) {
// if verbose flag is set, print out extracted text to STDOUT
if (verbose) {
System.out.println("(Verbose mode) Extracted text:");
System.out.println(extractedText);
}
// return the extracted text as a UTF-8 stream.
return new ByteArrayInputStream(extractedText.getBytes(StandardCharsets.UTF_8));
}
return null;
}
/**
* Extracts the text out of a given source InputStream, using a temporary file. This decreases the amount of memory
* necessary for text extraction, but can be slower as it requires writing extracted text to a temporary file.
* @param source source InputStream
* @param verbose verbose mode enabled/disabled
* @return InputStream for temporary file containing extracted text
* @throws IOException
* @throws SAXException
* @throws TikaException
*/
private InputStream extractUsingTempFile(InputStream source, boolean verbose)
throws IOException, TikaException, SAXException {
File tempExtractedTextFile = File.createTempFile("dspacetextextract" + source.hashCode(), ".txt");
if (verbose) {
System.out.println("(Verbose mode) Extracted text was written to temporary file at " +
tempExtractedTextFile.getAbsolutePath());
} else {
tempExtractedTextFile.deleteOnExit();
}
// Open temp file for writing
try (FileWriter writer = new FileWriter(tempExtractedTextFile, StandardCharsets.UTF_8)) {
// Initialize a custom ContentHandlerDecorator which is a BodyContentHandler.
// This mimics the behavior of Tika().parseToString(), which only extracts text from the body of the file.
// This custom Handler writes any extracted text to the temp file.
ContentHandlerDecorator handler = new BodyContentHandler(new ContentHandlerDecorator() {
/**
* Write all extracted characters directly to the temp file.
*/
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
try {
writer.append(new String(ch), start, length);
} catch (IOException e) {
String errorMsg = String.format("Could not append to temporary file at %s " +
"when performing text extraction",
tempExtractedTextFile.getAbsolutePath());
log.error(errorMsg, e);
throw new SAXException(errorMsg, e);
}
}
/**
* Write all ignorable whitespace directly to the temp file.
* This mimics the behaviour of Tika().parseToString() which extracts ignorableWhitespace characters
* (like blank lines, indentations, etc.), so that we get the same extracted text either way.
*/
@Override
public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
try {
writer.append(new String(ch), start, length);
} catch (IOException e) {
String errorMsg = String.format("Could not append to temporary file at %s " +
"when performing text extraction",
tempExtractedTextFile.getAbsolutePath());
log.error(errorMsg, e);
throw new SAXException(errorMsg, e);
}
}
});
AutoDetectParser parser = new AutoDetectParser();
Metadata metadata = new Metadata();
// parse our source InputStream using the above custom handler
parser.parse(source, handler, metadata);
}
// At this point, all extracted text is written to our temp file. So, return a FileInputStream for that file
return new FileInputStream(tempExtractedTextFile);
}
}
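The in-memory branch of the new filter reduces to the Tika facade: automatic format detection plus a capped parseToString(). A standalone sketch of just that call path, assuming tika-core and a parser package on the classpath (the sample HTML input is illustrative):

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.tika.Tika;

public class TikaExtractionSketch {
    public static void main(String[] args) throws Exception {
        InputStream source = new ByteArrayInputStream(
                "<html><body>Hello, DSpace.</body></html>".getBytes(StandardCharsets.UTF_8));

        Tika tika = new Tika();          // detects the input format automatically
        tika.setMaxStringLength(100000); // same cap the filter reads from configuration
        String text = tika.parseToString(source);
        System.out.println(text.trim()); // prints "Hello, DSpace."
    }
}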

View File

@@ -21,7 +21,8 @@ import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.external.OrcidRestConnector;
@@ -40,7 +41,7 @@ import org.orcid.jaxb.model.v3.release.search.Result;
*/
public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
private static Logger log = Logger.getLogger(Orcidv3SolrAuthorityImpl.class);
private final static Logger log = LogManager.getLogger();
private OrcidRestConnector orcidRestConnector;
private String OAUTHUrl;
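This file and several below apply the same logging migration: Log4j 1.x's Logger.getLogger(SomeClass.class) becomes Log4j 2's LogManager.getLogger(), whose no-argument form names the logger after the calling class. A minimal sketch, assuming log4j-api on the classpath:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LoggerMigrationSketch {
    // Old (Log4j 1.x):
    // private static Logger log = org.apache.log4j.Logger.getLogger(LoggerMigrationSketch.class);

    // New (Log4j 2); the logger is named after the enclosing class automatically:
    private static final Logger log = LogManager.getLogger();

    public static void main(String[] args) {
        log.info("logger name: {}", log.getName()); // LoggerMigrationSketch
    }
}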

View File

@@ -14,11 +14,12 @@ import java.util.Iterator;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
@@ -62,36 +63,26 @@ public class XMLUtils {
/**
* @param xml The starting context (a Node or a Document, for example).
* @param NodeListXPath xpath
* @param nodeListXPath xpath
* @return A Node matches the NodeListXPath
* null if nothing matches the NodeListXPath
* @throws XPathExpressionException if xpath error
*/
public static Node getNode(Node xml, String NodeListXPath) throws XPathExpressionException {
Node result = null;
try {
result = XPathAPI.selectSingleNode(xml, NodeListXPath);
} catch (TransformerException e) {
log.error("Error", e);
}
return result;
public static Node getNode(Node xml, String nodeListXPath) throws XPathExpressionException {
XPath xPath = XPathFactory.newInstance().newXPath();
return (Node) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODE);
}
/**
* @param xml The starting context (a Node or a Document, for example).
* @param NodeListXPath xpath
* @param nodeListXPath xpath
* @return A NodeList containing the nodes that match the NodeListXPath
* null if nothing matches the NodeListXPath
* @throws XPathExpressionException if xpath error
*/
public static NodeList getNodeList(Node xml, String NodeListXPath) throws XPathExpressionException {
NodeList nodeList = null;
try {
nodeList = XPathAPI.selectNodeList(xml, NodeListXPath);
} catch (TransformerException e) {
log.error("Error", e);
}
return nodeList;
public static NodeList getNodeList(Node xml, String nodeListXPath) throws XPathExpressionException {
XPath xPath = XPathFactory.newInstance().newXPath();
return (NodeList) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODESET);
}
public static Iterator<Node> getNodeListIterator(Node xml, String NodeListXPath) throws XPathExpressionException {

View File

@@ -93,7 +93,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
private String rptype;
@Lob
@Type(type = "org.hibernate.type.MaterializedClobType")
@Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
@Column(name = "rpdescription")
private String rpdescription;

View File

@@ -11,6 +11,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -202,6 +203,8 @@ public class BrowseEngine {
// get the table name that we are going to be getting our data from
dao.setTable(browseIndex.getTableName());
dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
// tell the browse query whether we are ascending or descending on the value
dao.setAscending(scope.isAscending());
@@ -248,9 +251,6 @@ public class BrowseEngine {
}
}
// this is the total number of results in answer to the query
int total = getTotalResults();
// assemble the ORDER BY clause
String orderBy = browseIndex.getSortField(scope.isSecondLevel());
if (scope.getSortBy() > 0) {
@@ -258,6 +258,9 @@ public class BrowseEngine {
}
dao.setOrderField(orderBy);
// this is the total number of results in answer to the query
int total = getTotalResults();
int offset = scope.getOffset();
String rawFocusValue = null;
if (offset < 1 && (scope.hasJumpToItem() || scope.hasJumpToValue() || scope.hasStartsWith())) {
@@ -269,9 +272,6 @@ public class BrowseEngine {
String focusValue = normalizeJumpToValue(rawFocusValue);
log.debug("browsing using focus: " + focusValue);
// Convert the focus value into an offset
offset = getOffsetForValue(focusValue);
}
dao.setOffset(offset);
@@ -684,13 +684,11 @@ public class BrowseEngine {
// our count, storing them locally to reinstate later
String focusField = dao.getJumpToField();
String focusValue = dao.getJumpToValue();
String orderField = dao.getOrderField();
int limit = dao.getLimit();
int offset = dao.getOffset();
dao.setJumpToField(null);
dao.setJumpToValue(null);
dao.setOrderField(null);
dao.setLimit(-1);
dao.setOffset(-1);
@@ -700,7 +698,6 @@ public class BrowseEngine {
// now put back the values we removed for this method
dao.setJumpToField(focusField);
dao.setJumpToValue(focusValue);
dao.setOrderField(orderField);
dao.setLimit(limit);
dao.setOffset(offset);
dao.setCountValues(null);

View File

@@ -205,6 +205,9 @@ public class SolrBrowseDAO implements BrowseDAO {
} else if (valuePartial) {
query.addFilterQueries("{!field f=" + facetField + "_partial}" + value);
}
if (StringUtils.isNotBlank(startsWith) && orderField != null) {
query.addFilterQueries("bi_" + orderField + "_sort:" + startsWith + "*");
}
// filter on item to be sure to don't include any other object
// indexed in the Discovery Search core
query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE);

View File

@@ -59,7 +59,7 @@ public class MetadataValue implements ReloadableEntity<Integer> {
* The value of the field
*/
@Lob
@Type(type = "org.hibernate.type.MaterializedClobType")
@Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
@Column(name = "text_value")
private String value;


@@ -130,12 +130,6 @@ public abstract class XSLTCrosswalk extends SelfNamedPlugin {
return aliasList.toArray(new String[aliasList.size()]);
}
/**
* We need to force this, because some dependency elsewhere interferes.
*/
private static final String TRANSFORMER_FACTORY_CLASS
= "com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl";
private Transformer transformer = null;
private File transformFile = null;
private long transformLastModified = 0;
@@ -181,8 +175,7 @@ public abstract class XSLTCrosswalk extends SelfNamedPlugin {
Source transformSource
= new StreamSource(new FileInputStream(transformFile));
TransformerFactory transformerFactory
= TransformerFactory.newInstance(
TRANSFORMER_FACTORY_CLASS, null);
= TransformerFactory.newInstance();
transformer = transformerFactory.newTransformer(transformSource);
transformLastModified = transformFile.lastModified();
} catch (TransformerConfigurationException | FileNotFoundException e) {
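With the pinned implementation class gone, transformer creation falls back to standard JAXP discovery. A minimal sketch of the resulting behavior, assuming a hypothetical stylesheet path:

import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;

// Resolves via the usual JAXP lookup order: the
// javax.xml.transform.TransformerFactory system property, then ServiceLoader,
// then the JDK's built-in XSLTC implementation.
TransformerFactory factory = TransformerFactory.newInstance();
Transformer transformer = factory.newTransformer(new StreamSource("crosswalk.xsl"));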


@@ -7,7 +7,8 @@
*/
package org.dspace.content.logic;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.Context;
@@ -21,7 +22,7 @@ import org.dspace.core.Context;
*/
public class DefaultFilter implements Filter {
private LogicalStatement statement;
private static Logger log = Logger.getLogger(Filter.class);
private final static Logger log = LogManager.getLogger();
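// Note: the old declaration above asked for Logger.getLogger(Filter.class), so
// messages from DefaultFilter were logged under the Filter category. The
// no-argument LogManager.getLogger() resolves the calling class at runtime,
// which removes that copy-paste hazard.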
/**
* Set statement from Spring configuration in item-filters.xml


@@ -10,7 +10,8 @@ package org.dspace.content.logic.condition;
import java.sql.SQLException;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
@@ -26,7 +27,7 @@ import org.dspace.core.Context;
* @version $Revision$
*/
public class InCommunityCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(InCommunityCondition.class);
private final static Logger log = LogManager.getLogger();
/**
* Return true if item is in one of the specified collections


@@ -7,7 +7,8 @@
*/
package org.dspace.content.logic.condition;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;
@@ -19,7 +20,7 @@ import org.dspace.core.Context;
* @version $Revision$
*/
public class IsWithdrawnCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(IsWithdrawnCondition.class);
private final static Logger log = LogManager.getLogger();
/**
* Return true if item is withdrawn


@@ -11,7 +11,8 @@ import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.logic.LogicalStatementException;
@@ -26,7 +27,7 @@ import org.dspace.core.Context;
*/
public class MetadataValueMatchCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(MetadataValueMatchCondition.class);
private final static Logger log = LogManager.getLogger();
/**
* Return true if any value for a specified field in the item matches a specified regex pattern


@@ -11,7 +11,8 @@ import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.logic.LogicalStatementException;
@@ -26,7 +27,7 @@ import org.dspace.core.Context;
*/
public class MetadataValuesMatchCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(MetadataValuesMatchCondition.class);
private final static Logger log = LogManager.getLogger();
/**
* Return true if any value for a specified field in the item matches any of the specified regex patterns


@@ -10,7 +10,8 @@ package org.dspace.content.logic.condition;
import java.sql.SQLException;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
@@ -27,7 +28,7 @@ import org.dspace.core.Context;
* @version $Revision$
*/
public class ReadableByGroupCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(ReadableByGroupCondition.class);
private final static Logger log = LogManager.getLogger();
// Authorize service
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();


@@ -7,6 +7,7 @@
*/
package org.dspace.curate;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
@@ -154,7 +155,8 @@ public class CitationPage extends AbstractCurationTask {
try {
//Create the cited document
InputStream citedInputStream =
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft();
new ByteArrayInputStream(
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft());
//Add the cited document to the appropriate bundle
this.addCitedPageToItem(citedInputStream, bundle, pBundle,
dBundle, displayMap, item, bitstream);
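The service now hands back raw bytes instead of a stream (see the CitationDocumentServiceImpl change below), so callers wrap the array themselves; the general shape, sketched from the two hunks:

Pair<byte[], Long> cited = citationDocument.makeCitedDocument(Curator.curationContext(), bitstream);
InputStream citedInputStream = new ByteArrayInputStream(cited.getLeft());
long citedLength = cited.getRight();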


@@ -19,7 +19,6 @@ public class CurationCliScriptConfiguration extends CurationScriptConfiguration<
public Options getOptions() {
options = super.getOptions();
options.addOption("e", "eperson", true, "email address of curating eperson");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true);
return options;
}


@@ -74,25 +74,17 @@ public enum IndexClientOptions {
options
.addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle");
options.getOption("r").setType(String.class);
options.addOption("i", "index", true,
"add or update an Item, Collection or Community based on its handle or uuid");
options.getOption("i").setType(boolean.class);
options.addOption("c", "clean", false,
"clean existing index removing any documents that no longer exist in the db");
options.getOption("c").setType(boolean.class);
options.addOption("d", "delete", false,
"delete all records from existing index");
options.getOption("d").setType(boolean.class);
options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists");
options.getOption("b").setType(boolean.class);
options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f.");
options.getOption("s").setType(boolean.class);
options.addOption("f", "force", false,
"if updating existing index, force each handle to be reindexed even if uptodate");
options.getOption("f").setType(boolean.class);
options.addOption("h", "help", false, "print this help message");
options.getOption("h").setType(boolean.class);
return options;
}
}


@@ -95,7 +95,6 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
100000);
// Use Tika's Text parser as the streams are always from the TEXT bundle (i.e. already extracted text)
// TODO: We may wish to consider using Tika to extract the text in the future.
TextAndCSVParser tikaParser = new TextAndCSVParser();
BodyContentHandler tikaHandler = new BodyContentHandler(charLimit);
Metadata tikaMetadata = new Metadata();
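For reference, a sketch of how these three pieces combine; the input stream variable and the surrounding error handling are assumptions, not shown in the patch:

import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.csv.TextAndCSVParser;
import org.apache.tika.sax.BodyContentHandler;

TextAndCSVParser tikaParser = new TextAndCSVParser();
BodyContentHandler tikaHandler = new BodyContentHandler(charLimit); // throws once charLimit chars are written
Metadata tikaMetadata = new Metadata();
tikaParser.parse(inputStream, tikaHandler, tikaMetadata, new ParseContext());
String extractedText = tikaHandler.toString();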


@@ -8,7 +8,6 @@
package org.dspace.disseminate;
import java.awt.Color;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
@@ -297,7 +296,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
}
@Override
public Pair<InputStream, Long> makeCitedDocument(Context context, Bitstream bitstream)
public Pair<byte[], Long> makeCitedDocument(Context context, Bitstream bitstream)
throws IOException, SQLException, AuthorizeException {
PDDocument document = new PDDocument();
PDDocument sourceDocument = new PDDocument();
@@ -318,7 +317,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
document.save(out);
byte[] data = out.toByteArray();
return Pair.of(new ByteArrayInputStream(data), Long.valueOf(data.length));
return Pair.of(data, Long.valueOf(data.length));
}
} finally {


@@ -8,7 +8,6 @@
package org.dspace.disseminate.service;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import org.apache.commons.lang3.tuple.Pair;
@@ -84,7 +83,7 @@ public interface CitationDocumentService {
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public Pair<InputStream, Long> makeCitedDocument(Context context, Bitstream bitstream)
public Pair<byte[], Long> makeCitedDocument(Context context, Bitstream bitstream)
throws IOException, SQLException, AuthorizeException;
/**


@@ -7,10 +7,12 @@
*/
package org.dspace.eperson;
import java.io.IOException;
import java.util.Date;
import java.util.UUID;
import javax.mail.MessagingException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Constants;
import org.dspace.core.Context;
@@ -30,16 +32,17 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* Recommended filter: EPerson+Create
*
* @author Stuart Lewis
* @version $Revision$
*/
public class EPersonConsumer implements Consumer {
/**
* log4j logger
*/
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class);
private static final Logger log
= org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class);
protected EPersonService ePersonService
= EPersonServiceFactory.getInstance().getEPersonService();
protected ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -74,6 +77,7 @@ public class EPersonConsumer implements Consumer {
if (et == Event.CREATE) {
// Notify of new user registration
String notifyRecipient = configurationService.getProperty("registration.notify");
EPerson eperson = ePersonService.find(context, id);
if (notifyRecipient == null) {
notifyRecipient = "";
}
@@ -81,7 +85,6 @@ public class EPersonConsumer implements Consumer {
if (!notifyRecipient.equals("")) {
try {
EPerson eperson = ePersonService.find(context, id);
Email adminEmail = Email
.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "registration_notify"));
adminEmail.addRecipient(notifyRecipient);
@@ -103,6 +106,26 @@ public class EPersonConsumer implements Consumer {
"error_emailing_administrator", ""), me);
}
}
// If enabled, send a "welcome" message to the new EPerson.
if (configurationService.getBooleanProperty("mail.welcome.enabled", false)) {
String addressee = eperson.getEmail();
if (StringUtils.isNotBlank(addressee)) {
log.debug("Sending welcome email to {}", addressee);
try {
Email message = Email.getEmail(
I18nUtil.getEmailFilename(context.getCurrentLocale(), "welcome"));
message.addRecipient(addressee);
message.send();
} catch (IOException | MessagingException ex) {
log.warn("Welcome message not sent to {}: {}",
addressee, ex.getMessage());
}
} else {
log.warn("Welcome message not sent to EPerson {} because it has no email address.",
eperson.getID().toString());
}
}
} else if (et == Event.DELETE) {
// TODO: Implement this if required
}


@@ -31,7 +31,8 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants;
import org.dspace.core.Context;
@@ -55,7 +56,7 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener {
// 20 is the maximum number of events per batch allowed by the GA API
private static final int GA_MAX_EVENTS = 20;
private static final String ANALYTICS_BATCH_ENDPOINT = "https://www.google-analytics.com/batch";
private static Logger log = Logger.getLogger(GoogleAsyncEventListener.class);
private final static Logger log = LogManager.getLogger();
private static String analyticsKey;
private static CloseableHttpClient httpclient;
private static Buffer buffer;


@@ -23,7 +23,7 @@ public class CanvasCacheEvictService {
CacheManager cacheManager;
public void evictSingleCacheValue(String cacheKey) {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evict(cacheKey);
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey);
}
}


@@ -26,11 +26,11 @@ public class ManifestsCacheEvictService {
CacheManager cacheManager;
public void evictSingleCacheValue(String cacheKey) {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evict(cacheKey);
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey);
}
public void evictAllCacheValues() {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).clear();
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate();
}
}
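Both replacements move from fire-and-forget cache operations to the immediate variants added in Spring Framework 5.2; a hedged sketch of the difference, using Spring's Cache API:

Cache cache = Objects.requireNonNull(cacheManager.getCache(CACHE_NAME));
// evict(key) may be performed asynchronously by some cache providers;
// evictIfPresent(key) guarantees immediate removal and reports whether the
// entry existed.
boolean existed = cache.evictIfPresent(cacheKey);
// Likewise clear() may be deferred, while invalidate() blocks until the cache
// is empty and reports whether any mappings were removed.
boolean hadEntries = cache.invalidate();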


@@ -1,70 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import java.util.ArrayList;
import java.util.List;
import com.google.gson.Gson;
/**
* A neutral data object to hold data for statistics.
*/
public class DataTermsFacet {
private List<TermsFacet> terms;
public DataTermsFacet() {
terms = new ArrayList<TermsFacet>();
}
public void addTermFacet(TermsFacet termsFacet) {
terms.add(termsFacet);
}
/**
* Render this data object into JSON format.
*
* An example of the output could be of the format:
* [{"term":"247166","count":10},{"term":"247168","count":6}]
*
* @return JSON-formatted data.
*/
public String toJson() {
Gson gson = new Gson();
return gson.toJson(terms);
}
public static class TermsFacet {
private String term;
private Integer count;
public TermsFacet(String term, Integer count) {
setTerm(term);
setCount(count);
}
public String getTerm() {
return term;
}
public void setTerm(String term) {
this.term = term;
}
public Integer getCount() {
return count;
}
public void setCount(Integer count) {
this.count = count;
}
}
}


@@ -56,15 +56,12 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration<T extends RetryFailedO
Options options = new Options();
options.addOption("a", true, "Add a new \"failed\" row to the table with a url (test purposes only)");
options.getOption("a").setType(String.class);
options.addOption("r", false,
"Retry sending requests to all urls stored in the table with failed requests. " +
"This includes the url that can be added through the -a option.");
options.getOption("r").setType(boolean.class);
options.addOption("h", "help", false, "print this help message");
options.getOption("h").setType(boolean.class);
super.options = options;
}


@@ -49,6 +49,16 @@ public class EntityTypeServiceInitializer implements Callback {
}
}
/**
* The callback name; Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return EntityTypeServiceInitializer.class.getSimpleName();
}
@Override
public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) {
// Must run AFTER all migrations complete, since it is dependent on Hibernate


@@ -51,6 +51,16 @@ public class GroupServiceInitializer implements Callback {
}
/**
* The callback name; Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return GroupServiceInitializer.class.getSimpleName();
}
/**
* Events supported by this callback.
* @param event Flyway event


@@ -97,6 +97,16 @@ public class PostgreSQLCryptoChecker implements Callback {
}
}
/**
* The callback name; Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return PostgreSQLCryptoChecker.class.getSimpleName();
}
/**
* Events supported by this callback.
* @param event Flyway event


@@ -12,6 +12,7 @@ import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPathExpressionException;
import org.dspace.administer.MetadataImporter;
import org.dspace.administer.RegistryImportException;
@@ -89,7 +90,7 @@ public class RegistryUpdater implements Callback {
} catch (IOException | SQLException | ParserConfigurationException
| TransformerException | RegistryImportException
| AuthorizeException | NonUniqueMetadataException
| SAXException e) {
| SAXException | XPathExpressionException e) {
log.error("Error attempting to update Bitstream Format and/or Metadata Registries", e);
throw new RuntimeException("Error attempting to update Bitstream Format and/or Metadata Registries", e);
} finally {
@@ -101,6 +102,16 @@ public class RegistryUpdater implements Callback {
}
/**
* The callback name; Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return RegistryUpdater.class.getSimpleName();
}
/**
* Events supported by this callback.
* @param event Flyway event


@@ -73,6 +73,16 @@ public class SiteServiceInitializer implements Callback {
}
/**
* The callback name; Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return SiteServiceInitializer.class.getSimpleName();
}
/**
* Events supported by this callback.
* @param event Flyway event


@@ -0,0 +1,57 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms.hibernate;
import org.apache.commons.lang.StringUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
import org.hibernate.type.descriptor.java.StringTypeDescriptor;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
/**
* A Hibernate @Type used to properly support the CLOB in both Postgres and Oracle.
* PostgreSQL doesn't have a CLOB type; instead it uses a TEXT field.
* Normally, you'd use org.hibernate.type.TextType to support TEXT, but that won't work for Oracle.
* https://github.com/hibernate/hibernate-orm/blob/5.6/hibernate-core/src/main/java/org/hibernate/type/TextType.java
*
* This Type checks if we are using PostgreSQL.
* If so, it configures Hibernate to map CLOB to LongVarChar (same as org.hibernate.type.TextType).
* If not, it uses default CLOB (which works for other databases).
*/
public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType<String> {
public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType();
public DatabaseAwareLobType() {
super( getDbDescriptor(), StringTypeDescriptor.INSTANCE );
}
public static SqlTypeDescriptor getDbDescriptor() {
if ( isPostgres() ) {
return LongVarcharTypeDescriptor.INSTANCE;
} else {
return ClobTypeDescriptor.DEFAULT;
}
}
private static boolean isPostgres() {
ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
String dbDialect = configurationService.getProperty("db.dialect");
return StringUtils.containsIgnoreCase(dbDialect, "PostgreSQL");
}
@Override
public String getName() {
return "database_aware_lob";
}
}
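For reference, this is the annotation pattern that the MetadataValue and ResourcePolicy entities earlier in this commit now use to reference the type:

@Lob
@Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
@Column(name = "text_value")
private String value;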


@@ -1,67 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms.hibernate.postgres;
import java.sql.Types;
import org.hibernate.dialect.PostgreSQL82Dialect;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.PostgresUUIDType;
import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
/**
* UUIDs are not supported by default in Hibernate, owing to differences between databases; to fix this, a custom
* SQL dialect is needed.
* Source: https://forum.hibernate.org/viewtopic.php?f=1&amp;t=1014157
*
* @author kevinvandevelde at atmire.com
*/
public class DSpacePostgreSQL82Dialect extends PostgreSQL82Dialect {
@Override
public void contributeTypes(final org.hibernate.boot.model.TypeContributions typeContributions,
final ServiceRegistry serviceRegistry) {
super.contributeTypes(typeContributions, serviceRegistry);
typeContributions.contributeType(new InternalPostgresUUIDType());
}
@Override
protected void registerHibernateType(int code, String name) {
super.registerHibernateType(code, name);
}
protected static class InternalPostgresUUIDType extends PostgresUUIDType {
@Override
protected boolean registerUnderJavaType() {
return true;
}
}
/**
* Override is needed to properly support the CLOB on metadatavalue in Postgres and Oracle.
*
* @param sqlCode {@linkplain java.sql.Types JDBC type-code} for the column mapped by this type.
* @return Descriptor for the SQL/JDBC side of a value mapping.
*/
@Override
public SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) {
SqlTypeDescriptor descriptor;
switch (sqlCode) {
case Types.CLOB: {
descriptor = LongVarcharTypeDescriptor.INSTANCE;
break;
}
default: {
descriptor = super.getSqlTypeDescriptorOverride(sqlCode);
break;
}
}
return descriptor;
}
}


@@ -86,10 +86,11 @@ public class MigrationUtils {
cascade = true;
break;
case "h2":
// In H2, constraints are listed in the "information_schema.constraints" table
// In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table
constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " +
"FROM information_schema.constraints " +
"WHERE table_name = ? AND column_list = ?";
"FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE " +
"WHERE TABLE_NAME = ? AND COLUMN_NAME = ?";
cascade = true;
break;
default:
throw new SQLException("DBMS " + dbtype + " is unsupported in this migration.");
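A hedged sketch of how such a lookup is typically consumed; the table and column names below are illustrative, and H2 upper-cases unquoted identifiers:

try (PreparedStatement ps = connection.prepareStatement(constraintNameSQL)) {
    ps.setString(1, "METADATAVALUE");  // illustrative table name
    ps.setString(2, "TEXT_VALUE");     // illustrative column name
    try (ResultSet rs = ps.executeQuery()) {
        if (rs.next()) {
            String constraintName = rs.getString("CONSTRAINT_NAME");
            // the migration can then issue:
            //   ALTER TABLE <table> DROP CONSTRAINT <constraintName> [CASCADE]
        }
    }
}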


@@ -64,12 +64,6 @@ public class SubmissionFormsMigration extends DSpaceRunnable<SubmissionFormsMigr
"<!ELEMENT input-forms (form-map, form-definitions, form-value-pairs) >";
private List<File> tempFiles = new ArrayList<>();
/**
* We need to force this, because some dependency elsewhere interferes.
*/
private static final String TRANSFORMER_FACTORY_CLASS
= "org.apache.xalan.processor.TransformerFactoryImpl";
@Override
public void internalRun() throws TransformerException {
if (help) {
@@ -101,8 +95,7 @@ public class SubmissionFormsMigration extends DSpaceRunnable<SubmissionFormsMigr
Result result = new StreamResult(new File(outputPath));
// Create an instance of TransformerFactory
TransformerFactory transformerFactory = TransformerFactory.newInstance(
TRANSFORMER_FACTORY_CLASS, null);
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer trans;
try {


@@ -53,11 +53,8 @@ public class SubmissionFormsMigrationCliScriptConfiguration<T extends Submission
Options options = new Options();
options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location");
options.getOption("f").setType(String.class);
options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location");
options.getOption("s").setType(String.class);
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options;
}


@@ -15,8 +15,11 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.xpath.XPathAPI;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
@@ -56,7 +59,7 @@ public class ControlledVocabulary {
* TODO: add some caching !
*/
public static ControlledVocabulary loadVocabulary(String fileName)
throws IOException, SAXException, ParserConfigurationException, TransformerException {
throws IOException, SAXException, ParserConfigurationException, XPathExpressionException {
StringBuilder filePath = new StringBuilder();
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -70,7 +73,9 @@ public class ControlledVocabulary {
if (controlledVocFile.exists()) {
DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document document = builder.parse(controlledVocFile);
return loadVocabularyNode(XPathAPI.selectSingleNode(document, "node"), "");
XPath xPath = XPathFactory.newInstance().newXPath();
Node node = (Node) xPath.compile("node").evaluate(document, XPathConstants.NODE);
return loadVocabularyNode(node, "");
} else {
return null;
}
@@ -85,7 +90,8 @@ public class ControlledVocabulary {
* @return a vocabulary node with all its children
* @throws TransformerException should something go wrong with loading the xml
*/
private static ControlledVocabulary loadVocabularyNode(Node node, String initialValue) throws TransformerException {
private static ControlledVocabulary loadVocabularyNode(Node node, String initialValue)
throws XPathExpressionException {
Node idNode = node.getAttributes().getNamedItem("id");
String id = null;
if (idNode != null) {
@@ -102,7 +108,9 @@ public class ControlledVocabulary {
} else {
value = label;
}
NodeList subNodes = XPathAPI.selectNodeList(node, "isComposedBy/node");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList subNodes = (NodeList) xPath.compile("isComposedBy/node").evaluate(node,
XPathConstants.NODESET);
List<ControlledVocabulary> subVocabularies = new ArrayList<>(subNodes.getLength());
for (int i = 0; i < subNodes.getLength(); i++) {
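The TODO near the top of loadVocabulary asks for caching; since a compiled XPathExpression is not thread-safe, one hedged approach is a per-thread cache of the compiled expression (the names below are hypothetical):

private static final ThreadLocal<XPathExpression> CHILD_NODES =
        ThreadLocal.withInitial(() -> {
            try {
                return XPathFactory.newInstance().newXPath().compile("isComposedBy/node");
            } catch (XPathExpressionException e) {
                throw new IllegalStateException(e);
            }
        });

private static NodeList childNodes(Node node) throws XPathExpressionException {
    return (NodeList) CHILD_NODES.get().evaluate(node, XPathConstants.NODESET);
}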


@@ -245,13 +245,13 @@ insert into most_recent_checksum
)
select
bitstream.bitstream_id,
'1',
true,
CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END,
CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END,
FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'),
FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'),
CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END,
'1'
true
from bitstream;
-- Update all the deleted checksums
@@ -263,7 +263,7 @@ update most_recent_checksum
set to_be_processed = 0
where most_recent_checksum.bitstream_id in (
select bitstream_id
from bitstream where deleted = '1' );
from bitstream where deleted = true );
-- this will insert into history table
-- for the initial start


@@ -36,7 +36,7 @@ alter table metadatavalue alter column resource_id set not null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id,
4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id,
@@ -47,7 +47,7 @@ FROM community where not introductory_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id,
4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id,
@@ -58,7 +58,7 @@ FROM community where not short_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id,
4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id,
@@ -69,7 +69,7 @@ FROM community where not side_bar_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id,
4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id,
@@ -80,7 +80,7 @@ FROM community where not copyright_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id,
4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -104,7 +104,7 @@ alter table community drop column name;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id,
3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id,
@@ -115,7 +115,7 @@ FROM collection where not introductory_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id,
3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id,
@@ -126,7 +126,7 @@ FROM collection where not short_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id,
3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id,
@@ -137,7 +137,7 @@ FROM collection where not side_bar_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id,
3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id,
@@ -148,7 +148,7 @@ FROM collection where not copyright_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id,
3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -159,7 +159,7 @@ FROM collection where not name is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id,
3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id,
@@ -170,7 +170,7 @@ FROM collection where not provenance_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id,
3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id,
@@ -194,7 +194,7 @@ alter table collection drop column provenance_description;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bundle_id AS resource_id,
1 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -214,7 +214,7 @@ alter table bundle drop column name;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id,
0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -225,7 +225,7 @@ FROM bitstream where not name is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id,
0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id,
@@ -236,7 +236,7 @@ FROM bitstream where not description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id,
0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id,
@@ -247,7 +247,7 @@ FROM bitstream where not user_format_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id,
0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id,
@@ -269,7 +269,7 @@ alter table bitstream drop column source;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_group_id AS resource_id,
6 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -288,7 +288,7 @@ alter table epersongroup drop column name;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id,
7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id,
@@ -299,7 +299,7 @@ FROM eperson where not firstname is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id,
7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id,
@@ -310,7 +310,7 @@ FROM eperson where not lastname is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id,
7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id,
@@ -321,7 +321,7 @@ FROM eperson where not phone is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT
metadatavalue_seq.nextval as metadata_value_id,
NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id,
7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id,


@@ -14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_i
SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
LEFT JOIN item ON item2bundle.item_id = item.item_id
WHERE item.withdrawn = 1
WHERE item.withdrawn = true
);
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in (
SELECT item2bundle.bundle_id FROM item2bundle
LEFT JOIN item ON item2bundle.item_id = item.item_id
WHERE item.withdrawn = 1
WHERE item.withdrawn = true
);


@@ -17,7 +17,7 @@
INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname,
rptype, rpdescription, eperson_id, epersongroup_id, dspace_object)
SELECT
resourcepolicy_seq.nextval AS policy_id,
NEXT VALUE FOR resourcepolicy_seq AS policy_id,
resource_type_id,
resource_id,
-- Insert the Constants.DELETE action


@@ -14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object i
SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
LEFT JOIN item ON item2bundle.item_id = item.uuid
WHERE item.withdrawn = 1
WHERE item.withdrawn = true
);
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in (
SELECT item2bundle.bundle_id FROM item2bundle
LEFT JOIN item ON item2bundle.item_id = item.uuid
WHERE item.withdrawn = 1
WHERE item.withdrawn = true
);


@@ -9,10 +9,11 @@
----------------------------------------------------
-- Make sure the metadatavalue.place column starts at 0 instead of 1
----------------------------------------------------
CREATE LOCAL TEMPORARY TABLE mdv_minplace (
dspace_object_id UUID NOT NULL,
metadata_field_id INT NOT NULL,
minplace INT NOT NULL,
minplace INT NOT NULL
);
INSERT INTO mdv_minplace


@@ -15,6 +15,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.MockAuthoritySolrServiceImpl;
import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.AbstractBuilder;
@@ -31,6 +32,7 @@ import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.statistics.MockSolrLoggerServiceImpl;
import org.dspace.statistics.MockSolrStatisticsCore;
import org.dspace.statistics.SolrStatisticsCore;
import org.dspace.storage.rdbms.DatabaseUtils;
import org.jdom2.Document;
import org.junit.After;
@@ -183,15 +185,15 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
searchService.reset();
// Clear the statistics core.
serviceManager
.getServiceByName(null, MockSolrStatisticsCore.class)
.getServiceByName(SolrStatisticsCore.class.getName(), MockSolrStatisticsCore.class)
.reset();
MockSolrLoggerServiceImpl statisticsService = serviceManager
.getServiceByName(null, MockSolrLoggerServiceImpl.class);
.getServiceByName("solrLoggerService", MockSolrLoggerServiceImpl.class);
statisticsService.reset();
MockAuthoritySolrServiceImpl authorityService = serviceManager
.getServiceByName(null, MockAuthoritySolrServiceImpl.class);
.getServiceByName(AuthoritySearchService.class.getName(), MockAuthoritySolrServiceImpl.class);
authorityService.reset();
// Reload our ConfigurationService (to reset configs to defaults again)


@@ -1,181 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.dspace.content.Item;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Drive the POI-based MS Word filter.
*
* @author mwood
*/
public class PoiWordFilterTest {
public PoiWordFilterTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of getFilteredName method, of class PoiWordFilter.
*/
/*
@Test
public void testGetFilteredName()
{
System.out.println("getFilteredName");
String oldFilename = "";
PoiWordFilter instance = new PoiWordFilter();
String expResult = "";
String result = instance.getFilteredName(oldFilename);
assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
fail("The test case is a prototype.");
}
*/
/**
* Test of getBundleName method, of class PoiWordFilter.
*/
/*
@Test
public void testGetBundleName()
{
System.out.println("getBundleName");
PoiWordFilter instance = new PoiWordFilter();
String expResult = "";
String result = instance.getBundleName();
assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
fail("The test case is a prototype.");
}
*/
/**
* Test of getFormatString method, of class PoiWordFilter.
*/
/*
@Test
public void testGetFormatString()
{
System.out.println("getFormatString");
PoiWordFilter instance = new PoiWordFilter();
String expResult = "";
String result = instance.getFormatString();
assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
fail("The test case is a prototype.");
}
*/
/**
* Test of getDescription method, of class PoiWordFilter.
*/
/*
@Test
public void testGetDescription()
{
System.out.println("getDescription");
PoiWordFilter instance = new PoiWordFilter();
String expResult = "";
String result = instance.getDescription();
assertEquals(expResult, result);
// TODO review the generated test code and remove the default call to fail.
fail("The test case is a prototype.");
}
*/
/**
* Test of getDestinationStream method, of class PoiWordFilter.
* Read a constant .doc document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamDoc()
throws Exception {
System.out.println("getDestinationStream");
Item currentItem = null;
InputStream source;
boolean verbose = false;
PoiWordFilter instance = new PoiWordFilter();
InputStream result;
source = getClass().getResourceAsStream("wordtest.doc");
result = instance.getDestinationStream(currentItem, source, verbose);
assertTrue("Known content was not found", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method, of class PoiWordFilter.
* Read a constant .docx document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamDocx()
throws Exception {
System.out.println("getDestinationStream");
Item currentItem = null;
InputStream source;
boolean verbose = false;
PoiWordFilter instance = new PoiWordFilter();
InputStream result;
source = getClass().getResourceAsStream("wordtest.docx");
result = instance.getDestinationStream(currentItem, source, verbose);
assertTrue("Known content was not found", readAll(result).contains("quick brown fox"));
}
/**
* Read the entire content of a stream into a String.
*
* @param stream a stream of UTF-8 characters.
* @return complete content of {@code stream}
* @throws IOException
*/
private static String readAll(InputStream stream)
throws IOException {
if (null == stream) {
return null;
}
byte[] bytes = new byte[stream.available()];
StringBuilder resultSb = new StringBuilder(bytes.length / 2); // Guess: average 2 bytes per character
int howmany;
while ((howmany = stream.read(bytes)) > 0) {
resultSb.append(new String(bytes, 0, howmany, StandardCharsets.UTF_8));
}
return resultSb.toString();
}
}


@@ -0,0 +1,323 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractUnitTest;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
/**
* Test the TikaTextExtractionFilter using test files for all major formats.
* The test files used below are all located at [dspace-api]/src/test/resources/org/dspace/app/mediafilter/
*
* @author mwood
* @author Tim Donohue
*/
public class TikaTextExtractionFilterTest extends AbstractUnitTest {
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
/**
* Test of getDestinationStream method using temp file for text extraction
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithUseTempFile()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
// Extract text from file with "use-temp-file=true"
configurationService.setProperty("textextractor.use-temp-file", "true");
InputStream source = getClass().getResourceAsStream("test.pdf");
InputStream result = instance.getDestinationStream(null, source, false);
String tempFileExtractedText = readAll(result);
// Verify text extracted successfully
assertTrue("Known content was not found in .pdf", tempFileExtractedText.contains("quick brown fox"));
// Now, extract text from the same file using the default in-memory approach
configurationService.setProperty("textextractor.use-temp-file", "false");
source = getClass().getResourceAsStream("test.pdf");
result = instance.getDestinationStream(null, source, false);
String inMemoryExtractedText = readAll(result);
// Verify the two results are equal
assertEquals("Extracted text via temp file is the same as in-memory.",
inMemoryExtractedText, tempFileExtractedText);
}
/**
* Test of getDestinationStream method when max characters is less than file size
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithMaxChars()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
// Set "max-chars" to a small value of 100 chars, which is less than the text size of the file.
configurationService.setProperty("textextractor.max-chars", "100");
InputStream source = getClass().getResourceAsStream("test.pdf");
InputStream result = instance.getDestinationStream(null, source, false);
String extractedText = readAll(result);
// Verify we have exactly the first 100 characters
assertEquals(100, extractedText.length());
// Verify it has some text at the beginning of the file, but NOT text near the end
assertTrue("Known beginning content was found", extractedText.contains("This is a text."));
assertFalse("Known ending content was not found", extractedText.contains("Emergency Broadcast System"));
}
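These two tests drive configuration knobs; a hedged sketch of how the filter side might read them (the TikaTextExtractionFilter implementation is not part of this excerpt, and the 100000 default mirrors the charLimit value seen in the IndexFactoryImpl hunk above):

int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000);
boolean useTempFile = configurationService.getBooleanProperty("textextractor.use-temp-file", false);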
/**
* Test of getDestinationStream method using older Microsoft Word document.
* Read a constant .doc document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithDoc()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.doc");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .doc", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using newer Microsoft Word document.
* Read a constant .docx document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithDocx()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.docx");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .docx", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using an ODT document
* Read a constant .odt document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithODT()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.odt");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .odt", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using an RTF document
* Read a constant .rtf document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithRTF()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.rtf");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .rtf", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using a PDF document
* Read a constant .pdf document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithPDF()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.pdf");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .pdf", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using an HTML document
* Read a constant .html document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithHTML()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.html");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .html", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using a TXT document
* Read a constant .txt document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithTxt()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.txt");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .txt", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using a CSV document
* Read a constant .csv document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithCsv()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.csv");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .csv", readAll(result).contains("data3,3"));
}
/**
* Test of getDestinationStream method using an XLS document
* Read a constant .xls document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithXLS()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.xls");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .xls", readAll(result).contains("data3,3"));
}
/**
* Test of getDestinationStream method using an XLSX document
* Read a constant .xlsx document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithXLSX()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.xlsx");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .xlsx", readAll(result).contains("data3,3"));
}
/**
* Test of getDestinationStream method using an ODS document
* Read a constant .ods document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithODS()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.ods");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .ods", readAll(result).contains("Data on the second sheet"));
}
/**
 * Test of getDestinationStream method using a PPT document
* Read a constant .ppt document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithPPT()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.ppt");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .ppt", readAll(result).contains("quick brown fox"));
}
/**
 * Test of getDestinationStream method using a PPTX document
* Read a constant .pptx document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithPPTX()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.pptx");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .pptx", readAll(result).contains("quick brown fox"));
}
/**
* Test of getDestinationStream method using an ODP document
* Read a constant .odp document and examine the extracted text.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testGetDestinationStreamWithODP()
throws Exception {
TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
InputStream source = getClass().getResourceAsStream("test.odp");
InputStream result = instance.getDestinationStream(null, source, false);
assertTrue("Known content was not found in .odp", readAll(result).contains("quick brown fox"));
}
/**
* Read the entire content of a stream into a String.
*
* @param stream a stream of UTF-8 characters.
* @return complete content of stream as a String
 * @throws IOException passed through.
*/
private static String readAll(InputStream stream)
throws IOException {
return IOUtils.toString(stream, StandardCharsets.UTF_8);
}
}
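The thirteen tests above share one pattern: open a classpath resource, pass it through TikaTextExtractionFilter, and assert that the extracted text contains a known phrase. A shared helper would remove the repetition; the sketch below is illustrative only (the assertExtracts name is an assumption, not part of this change):

    // Hypothetical helper (sketch, not part of this change): run one
    // resource through the filter and assert on the extracted text.
    private void assertExtracts(String resource, String expected)
        throws Exception {
        TikaTextExtractionFilter instance = new TikaTextExtractionFilter();
        try (InputStream source = getClass().getResourceAsStream(resource)) {
            InputStream result = instance.getDestinationStream(null, source, false);
            assertTrue("Known content was not found in " + resource,
                       readAll(result).contains(expected));
        }
    }

Each test body would then reduce to a single call such as assertExtracts("test.pdf", "quick brown fox").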

View File

@@ -8,18 +8,25 @@
package org.dspace.app.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.google.common.base.Splitter;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
@@ -30,6 +37,14 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.core.Constants;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutablePeriod;
import org.joda.time.format.PeriodFormat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -52,6 +67,10 @@ public class GoogleMetadataTest extends AbstractUnitTest {
private BitstreamService bitstreamService;
private ResourcePolicyService resourcePolicyService;
private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
private Community community;
/**
@@ -80,6 +99,8 @@ public class GoogleMetadataTest extends AbstractUnitTest {
bundleService = ContentServiceFactory.getInstance().getBundleService();
bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService();
bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService();
groupService = EPersonServiceFactory.getInstance().getGroupService();
} catch (AuthorizeException ex) {
log.error("Authorization Error in init", ex);
fail("Authorization Error in init: " + ex.getMessage());
@@ -326,6 +347,45 @@ public class GoogleMetadataTest extends AbstractUnitTest {
assertEquals("small", urlSplitted.get(urlSplitted.size() - 1));
}
/**
 * Verify there is no mapping for {@link GoogleMetadata#PDF} if the item holds only embargoed
 * (not publicly accessible) bitstreams.
*/
@Test
public void testGetPdfUrlOfEmbargoed() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = ContentServiceFactory.getInstance().getBundleService().create(context, it, "ORIGINAL");
Bitstream b = bitstreamService.create(
context, new ByteArrayInputStream("Larger file than primary".getBytes(StandardCharsets.UTF_8)));
b.setName(context, "first");
b.setFormat(context, bitstreamFormatService.create(context));
b.getFormat(context).setMIMEType("unknown");
bundleService.addBitstream(context, bundle, b);
// Set a 3-month embargo on the bitstream
MutablePeriod period = PeriodFormat.getDefault().parseMutablePeriod("3 months");
Date embargoDate = DateTime.now(DateTimeZone.UTC).plus(period).toDate();
Group anonGroup = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.removeAllPolicies(context, b);
resourcePolicyService.removeAllPolicies(context, b);
ResourcePolicy rp = authorizeService.createOrModifyPolicy(null, context, null, anonGroup,
null, embargoDate, Constants.READ, "GoogleMetadataTest", b);
if (rp != null) {
resourcePolicyService.update(context, rp);
}
GoogleMetadata gm = new GoogleMetadata(this.context, it);
assertTrue(gm.getPDFURL().isEmpty());
// No value for citation_pdf_url because the only bitstream is embargoed
boolean containsPdfUrl = false;
for (Map.Entry<String, String> mapping: gm.getMappings()) {
if (mapping.getKey().equalsIgnoreCase(gm.PDF)) {
containsPdfUrl = true;
}
}
assertFalse(containsPdfUrl);
}
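    /*
     * Sketch (assumption, not part of this change): if getMappings() returns
     * a java.util.Collection, the scan above collapses to a stream match:
     *
     *   boolean containsPdfUrl = gm.getMappings().stream()
     *       .anyMatch(mapping -> mapping.getKey().equalsIgnoreCase(gm.PDF));
     */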
@After
@Override
public void destroy() {

View File

@@ -18,6 +18,7 @@ import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
@@ -26,8 +27,6 @@ import org.dspace.eperson.Group;
*/
public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
public static final String ORIGINAL = "ORIGINAL";
private Bitstream bitstream;
private Item item;
private Group readerGroup;
@@ -158,12 +157,12 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
}
private Bundle getOriginalBundle(Item item) throws SQLException, AuthorizeException {
List<Bundle> bundles = itemService.getBundles(item, ORIGINAL);
List<Bundle> bundles = itemService.getBundles(item, Constants.CONTENT_BUNDLE_NAME);
Bundle targetBundle = null;
if (bundles.size() < 1) {
// not found, create a new one
targetBundle = bundleService.create(context, item, ORIGINAL);
targetBundle = bundleService.create(context, item, Constants.CONTENT_BUNDLE_NAME);
} else {
// put bitstreams into first bundle
targetBundle = bundles.iterator().next();

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.builder;
import static org.dspace.content.LicenseUtils.getLicenseText;
import java.io.IOException;
import java.sql.SQLException;
import java.util.UUID;
@@ -15,6 +17,7 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.LicenseUtils;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.DSpaceObjectService;
@@ -246,4 +249,16 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
}
}
public ItemBuilder grantLicense() {
String license;
try {
EPerson submitter = workspaceItem.getSubmitter();
submitter = context.reloadEntity(submitter);
license = getLicenseText(context.getCurrentLocale(), workspaceItem.getCollection(), item, submitter);
LicenseUtils.grantLicense(context, item, license, null);
} catch (Exception e) {
handleException(e);
}
return this;
}
}
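grantLicense() is chainable like the other builder methods; a minimal usage sketch in a test (the context, collection, and title values are assumptions for illustration):

    Item item = ItemBuilder.createItem(context, collection)
                           .withTitle("Licensed item")
                           .grantLicense()
                           .build();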

View File

@@ -11,7 +11,8 @@ import java.sql.SQLException;
import java.util.Objects;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.Context;
@@ -26,7 +27,7 @@ import org.dspace.versioning.service.VersioningService;
*/
public class VersionBuilder extends AbstractBuilder<Version, VersioningService> {
private static final Logger log = Logger.getLogger(VersionBuilder.class);
private static final Logger log = LogManager.getLogger(VersionBuilder.class);
private Version version;
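    // Log4j 2 note (sketch, assumption only): LogManager.getLogger() with no
    // argument resolves the calling class, so the class literal is optional:
    //
    //   private static final Logger log = LogManager.getLogger();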

View File

@@ -15,6 +15,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import org.apache.commons.lang.StringUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.CollectionBuilder;
@@ -408,7 +409,7 @@ public class CanvasDimensionsIT extends AbstractIntegrationTestWithDatabase {
execCanvasScriptWithMaxRecs(id);
// check System.out for number of items processed.
assertEquals("2 IIIF items were processed.\n", outContent.toString());
assertEquals("2 IIIF items were processed.", StringUtils.chomp(outContent.toString()));
}
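    // StringUtils.chomp (commons-lang) removes a single trailing line
    // terminator, so the assertion no longer depends on the line separator:
    //
    //   StringUtils.chomp("2 IIIF items were processed.\n")
    //       -> "2 IIIF items were processed."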
@Test

View File

@@ -54,9 +54,7 @@ public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableS
Options options = new Options();
options.addOption("r", "remove", true, "description r");
options.getOption("r").setType(String.class);
options.addOption("i", "index", false, "description i");
options.getOption("i").setType(boolean.class);
options.getOption("i").setRequired(true);
options.addOption("f", "file", true, "source file");
options.getOption("f").setType(InputStream.class);

View File

@@ -171,6 +171,7 @@ public class MockSolrServer {
* Discard the embedded Solr container.
*/
private static synchronized void destroyContainer() {
container.shutdown();
container = null;
log.info("SOLR CoreContainer destroyed");
}
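    // Shutting down the CoreContainer before dropping the reference releases
    // its index locks and background threads. A defensive variant (sketch,
    // assumption only) would guard against a double call:
    //
    //   if (container != null) {
    //       container.shutdown();
    //       container = null;
    //   }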

View File

@@ -0,0 +1,4 @@
row1,row2,row3,row4
"data1,2","data 2,2","data3,2","data4,2"
"data1,3","data 2,3","data3,3","data4,3"
"data1,4","data2,4","data3,4","data4,4"

View File

@@ -0,0 +1,53 @@
<html>
<head>
<meta http-equiv=Content-Type content="text/html; charset=UTF-8">
<title>A Text Extraction Test Document for DSpace</title>
</head>
<body>
<div>
<p style='text-align:center'><span
style='font-size:15.0pt'>A Text Extraction Test Document</span></p>
<p style='text-align:center'><span
style='font-size:10.0pt'>for</span></p>
<p style='text-align:center'><span
style='font-size:15.0pt'>DSpace</span></p>
<p></p>
<p>This is a text. For the next sixty seconds this software
will conduct a test of the DSpace text extraction facility. This is only a
text.</p>
<p>This is a paragraph that followed the first that lived in
the document that Jack built.</p>
<p>Lorem ipsum dolor sit amet. The quick brown fox jumped over
the lazy dog. Yow! Are we having fun yet?</p>
<p>This has been a test of the DSpace text extraction system.
In the event of actual content you would care what is written here.</p>
</div>
<br/>
<hr/>
<div>
<p>Tip o' the hat to the U.S. Emergency Broadcast System for the format that I have
irreverently borrowed.</p>
</div>
</body>
</html>

View File

@@ -0,0 +1,239 @@
{\rtf1\adeflang1025\ansi\ansicpg1252\uc1\adeff46\deff0\stshfdbch45\stshfloch43\stshfhich43\stshfbi46\deflang1033\deflangfe1033\themelang1033\themelangfe0\themelangcs0{\fonttbl{\f34\fbidi \froman\fcharset0\fprq2{\*\panose 02040503050406030204}Cambria Math;}{\f43\fbidi \froman\fcharset0\fprq2 Liberation Serif{\*\falt Times New Roman};}
{\f44\fbidi \fswiss\fcharset0\fprq2 Liberation Sans{\*\falt Arial};}{\f45\fbidi \froman\fcharset0\fprq0{\*\panose 00000000000000000000}AR PL SungtiL GB;}{\f46\fbidi \froman\fcharset0\fprq0{\*\panose 00000000000000000000}Lohit Hindi;}
{\flomajor\f31500\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fdbmajor\f31501\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}
{\fhimajor\f31502\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0302020204030204}Calibri Light;}{\fbimajor\f31503\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}
{\flominor\f31504\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fdbminor\f31505\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}
{\fhiminor\f31506\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0502020204030204}Calibri;}{\fbiminor\f31507\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\f1504\fbidi \froman\fcharset238\fprq2 Cambria Math CE;}
{\f1505\fbidi \froman\fcharset204\fprq2 Cambria Math Cyr;}{\f1507\fbidi \froman\fcharset161\fprq2 Cambria Math Greek;}{\f1508\fbidi \froman\fcharset162\fprq2 Cambria Math Tur;}{\f1511\fbidi \froman\fcharset186\fprq2 Cambria Math Baltic;}
{\f1512\fbidi \froman\fcharset163\fprq2 Cambria Math (Vietnamese);}{\flomajor\f31508\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flomajor\f31509\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}
{\flomajor\f31511\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\flomajor\f31512\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flomajor\f31513\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}
{\flomajor\f31514\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\flomajor\f31515\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flomajor\f31516\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}
{\fdbmajor\f31518\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fdbmajor\f31519\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fdbmajor\f31521\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}
{\fdbmajor\f31522\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fdbmajor\f31523\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fdbmajor\f31524\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}
{\fdbmajor\f31525\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fdbmajor\f31526\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fhimajor\f31528\fbidi \fswiss\fcharset238\fprq2 Calibri Light CE;}
{\fhimajor\f31529\fbidi \fswiss\fcharset204\fprq2 Calibri Light Cyr;}{\fhimajor\f31531\fbidi \fswiss\fcharset161\fprq2 Calibri Light Greek;}{\fhimajor\f31532\fbidi \fswiss\fcharset162\fprq2 Calibri Light Tur;}
{\fhimajor\f31533\fbidi \fswiss\fcharset177\fprq2 Calibri Light (Hebrew);}{\fhimajor\f31534\fbidi \fswiss\fcharset178\fprq2 Calibri Light (Arabic);}{\fhimajor\f31535\fbidi \fswiss\fcharset186\fprq2 Calibri Light Baltic;}
{\fhimajor\f31536\fbidi \fswiss\fcharset163\fprq2 Calibri Light (Vietnamese);}{\fbimajor\f31538\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fbimajor\f31539\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}
{\fbimajor\f31541\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fbimajor\f31542\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbimajor\f31543\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}
{\fbimajor\f31544\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fbimajor\f31545\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbimajor\f31546\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}
{\flominor\f31548\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flominor\f31549\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\flominor\f31551\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}
{\flominor\f31552\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flominor\f31553\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\flominor\f31554\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}
{\flominor\f31555\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flominor\f31556\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fdbminor\f31558\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}
{\fdbminor\f31559\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fdbminor\f31561\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fdbminor\f31562\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}
{\fdbminor\f31563\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fdbminor\f31564\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fdbminor\f31565\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}
{\fdbminor\f31566\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fhiminor\f31568\fbidi \fswiss\fcharset238\fprq2 Calibri CE;}{\fhiminor\f31569\fbidi \fswiss\fcharset204\fprq2 Calibri Cyr;}
{\fhiminor\f31571\fbidi \fswiss\fcharset161\fprq2 Calibri Greek;}{\fhiminor\f31572\fbidi \fswiss\fcharset162\fprq2 Calibri Tur;}{\fhiminor\f31573\fbidi \fswiss\fcharset177\fprq2 Calibri (Hebrew);}
{\fhiminor\f31574\fbidi \fswiss\fcharset178\fprq2 Calibri (Arabic);}{\fhiminor\f31575\fbidi \fswiss\fcharset186\fprq2 Calibri Baltic;}{\fhiminor\f31576\fbidi \fswiss\fcharset163\fprq2 Calibri (Vietnamese);}
{\fbiminor\f31578\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fbiminor\f31579\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fbiminor\f31581\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}
{\fbiminor\f31582\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbiminor\f31583\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fbiminor\f31584\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}
{\fbiminor\f31585\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbiminor\f31586\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\f1164\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}
{\f1165\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\f1167\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\f1168\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\f1169\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}
{\f1170\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\f1171\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\f1172\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}}{\colortbl;\red0\green0\blue0;\red0\green0\blue255;
\red0\green255\blue255;\red0\green255\blue0;\red255\green0\blue255;\red255\green0\blue0;\red255\green255\blue0;\red255\green255\blue255;\red0\green0\blue128;\red0\green128\blue128;\red0\green128\blue0;\red128\green0\blue128;\red128\green0\blue0;
\red128\green128\blue0;\red128\green128\blue128;\red192\green192\blue192;\red0\green0\blue0;\red0\green0\blue0;}{\*\defchp \fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\langfenp2052 }{\*\defpap
\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 }\noqfpromote {\stylesheet{\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0
\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \snext0 \sqformat \spriority0 Normal;}{\*\cs10 \additive \ssemihidden \sunhideused \spriority1 Default Paragraph Font;}{\*
\ts11\tsrowd\trftsWidthB3\trpaddl108\trpaddr108\trpaddfl3\trpaddft3\trpaddfb3\trpaddfr3\trcbpat1\trcfpat1\tblind0\tblindtype3\tsvertalt\tsbrdrt\tsbrdrl\tsbrdrb\tsbrdrr\tsbrdrdgl\tsbrdrdgr\tsbrdrh\tsbrdrv
\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \snext11 \ssemihidden \sunhideused
Normal Table;}{\*\cs15 \additive \sqformat \spriority0 Footnote Characters;}{\*\cs16 \additive \super \spriority0 Footnote Anchor;}{\*\cs17 \additive \super \spriority0 Endnote Anchor;}{\*\cs18 \additive \sqformat \spriority0 Endnote Characters;}{
\s19\ql \li0\ri0\sb240\sa120\keepn\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs28\alang1081 \ltrch\fcs0 \fs28\lang1033\langfe2052\loch\f44\hich\af44\dbch\af45\cgrid\langnp1033\langfenp2052
\sbasedon0 \snext20 \sqformat \spriority0 Heading;}{\s20\ql \li0\ri0\sa140\sl288\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0
\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext20 \spriority0 Body Text;}{\s21\ql \li0\ri0\sa140\sl288\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1
\af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon20 \snext21 \spriority0 List;}{
\s22\ql \li0\ri0\sb120\sa120\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \ai\af46\afs24\alang1081 \ltrch\fcs0 \i\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052
\sbasedon0 \snext22 \sqformat \spriority0 caption;}{\s23\ql \li0\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0
\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext23 \sqformat \spriority0 Index;}{\s24\ql \fi-339\li339\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin339\itap0 \rtlch\fcs1
\af46\afs20\alang1081 \ltrch\fcs0 \fs20\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext24 \spriority0 footnote text;}}{\*\rsidtbl \rsid6097384\rsid16590483\rsid16671749}{\mmathPr\mmathFont34\mbrkBin0
\mbrkBinSub0\msmallFrac0\mdispDef1\mlMargin0\mrMargin0\mdefJc1\mwrapIndent1440\mintLim0\mnaryLim1}{\info{\title A Text Extraction Test Document for DSpace}{\author Mark Wood}{\operator Tim Donohue}{\creatim\yr2022\mo3\dy30\hr13\min54}
{\revtim\yr2022\mo3\dy30\hr13\min54}{\version2}{\edmins0}{\nofpages1}{\nofwords75}{\nofchars433}{\nofcharsws507}{\vern43}}{\*\xmlnstbl {\xmlns1 http://schemas.microsoft.com/office/word/2003/wordml}}
\paperw12240\paperh15840\margl1134\margr1134\margt1134\margb1134\gutter0\ltrsect
\deftab709\widowctrl\ftnbj\aenddoc\trackmoves0\trackformatting1\donotembedsysfont1\relyonvml0\donotembedlingdata0\grfdocevents0\validatexml1\showplaceholdtext0\ignoremixedcontent0\saveinvalidxml0\showxmlerrors1
\noxlattoyen\expshrtn\noultrlspc\dntblnsbdb\nospaceforul\formshade\horzdoc\dgmargin\dghspace180\dgvspace180\dghorigin450\dgvorigin0\dghshow1\dgvshow1
\jexpand\viewkind5\viewscale100\pgbrdrhead\pgbrdrfoot\splytwnine\ftnlytwnine\htmautsp\nolnhtadjtbl\useltbaln\alntblind\lytcalctblwd\lyttblrtgr\lnbrkrule\nobrkwrptbl\snaptogridincell\allowfieldendsel\wrppunct
\asianbrkrule\rsidroot6097384\newtblstyruls\nogrowautofit\usenormstyforlist\noindnmbrts\felnbrelev\nocxsptable\indrlsweleven\noafcnsttbl\afelev\utinl\hwelev\spltpgpar\notcvasp\notbrkcnstfrctbl\notvatxbx\krnprsnet\cachedcolbal \nouicompat \fet0
{\*\wgrffmtfilter 2450}\nofeaturethrottle1\ilfomacatclnup0{\*\ftnsep \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0
\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsep
\par }}{\*\ftnsepc \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0
\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsepc
\par }}{\*\aftnsep \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0
\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsep
\par }}{\*\aftnsepc \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0
\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsepc
\par }}\ltrpar \sectd \ltrsect\linex0\headery0\footery0\endnhere\sectunlocked1\sectdefaultcl\sftnbj {\*\pnseclvl1\pnucrm\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl2\pnucltr\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl3
\pndec\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl4\pnlcltr\pnstart1\pnindent720\pnhang {\pntxta )}}{\*\pnseclvl5\pndec\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl6\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}
{\*\pnseclvl7\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl8\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl9\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}\pard\plain \ltrpar
\qc \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46\afs30 \ltrch\fcs0
\fs30\insrsid16671749 \hich\af43\dbch\af45\loch\f43 A Text Extraction Test Document}{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid6097384
\par }{\rtlch\fcs1 \af46\afs20 \ltrch\fcs0 \fs20\insrsid16671749 \hich\af43\dbch\af45\loch\f43 for}{\rtlch\fcs1 \af46\afs20 \ltrch\fcs0 \fs20\insrsid6097384
\par }{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid16671749 \hich\af43\dbch\af45\loch\f43 DSpace}{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid6097384
\par
\par }\pard \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43
This is a text. For the next sixty seconds this software will conduct a test of the DSpace text extraction facility. This is only a text.}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384
\par
\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 This is a paragraph that followed the first that lived in the \hich\af43\dbch\af45\loch\f43 document that Jack built.}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384
\par
\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 Lorem ipsum dolor sit amet. The quick brown fox jumped over the lazy dog. Yow! Are we having fun yet?}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384
\par
\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 This has been a test of the DSpace text extraction system. In the event of actual content you would care what is written he\hich\af43\dbch\af45\loch\f43 re.}{\rtlch\fcs1
\af46 \ltrch\fcs0 \cs16\super\insrsid16671749 \chftn {\footnote \ltrpar \pard\plain \ltrpar\s24\ql \fi-339\li339\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin339\itap0 \rtlch\fcs1 \af46\afs20\alang1081 \ltrch\fcs0
\fs20\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftn \tab \hich\af43\dbch\af45\loch\f43 Tip o\hich\f43 \rquote \loch\f43
the hat to the U.S. Emergency Broadcast System for the format that I have irreverently borrowed.}}}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384
\par }{\*\themedata 504b030414000600080000002100e9de0fbfff0000001c020000130000005b436f6e74656e745f54797065735d2e786d6cac91cb4ec3301045f748fc83e52d4a
9cb2400825e982c78ec7a27cc0c8992416c9d8b2a755fbf74cd25442a820166c2cd933f79e3be372bd1f07b5c3989ca74aaff2422b24eb1b475da5df374fd9ad
5689811a183c61a50f98f4babebc2837878049899a52a57be670674cb23d8e90721f90a4d2fa3802cb35762680fd800ecd7551dc18eb899138e3c943d7e503b6
b01d583deee5f99824e290b4ba3f364eac4a430883b3c092d4eca8f946c916422ecab927f52ea42b89a1cd59c254f919b0e85e6535d135a8de20f20b8c12c3b0
0c895fcf6720192de6bf3b9e89ecdbd6596cbcdd8eb28e7c365ecc4ec1ff1460f53fe813d3cc7f5b7f020000ffff0300504b030414000600080000002100a5d6
a7e7c0000000360100000b0000005f72656c732f2e72656c73848fcf6ac3300c87ef85bd83d17d51d2c31825762fa590432fa37d00e1287f68221bdb1bebdb4f
c7060abb0884a4eff7a93dfeae8bf9e194e720169aaa06c3e2433fcb68e1763dbf7f82c985a4a725085b787086a37bdbb55fbc50d1a33ccd311ba548b6309512
0f88d94fbc52ae4264d1c910d24a45db3462247fa791715fd71f989e19e0364cd3f51652d73760ae8fa8c9ffb3c330cc9e4fc17faf2ce545046e37944c69e462
a1a82fe353bd90a865aad41ed0b5b8f9d6fd010000ffff0300504b0304140006000800000021006b799616830000008a0000001c0000007468656d652f746865
6d652f7468656d654d616e616765722e786d6c0ccc4d0ac3201040e17da17790d93763bb284562b2cbaebbf600439c1a41c7a0d29fdbd7e5e38337cedf14d59b
4b0d592c9c070d8a65cd2e88b7f07c2ca71ba8da481cc52c6ce1c715e6e97818c9b48d13df49c873517d23d59085adb5dd20d6b52bd521ef2cdd5eb9246a3d8b
4757e8d3f729e245eb2b260a0238fd010000ffff0300504b030414000600080000002100b6f4679893070000c9200000160000007468656d652f7468656d652f
7468656d65312e786d6cec59cd8b1bc915bf07f23f347d97f5d5ad8fc1f2a24fcfda33b6b164873dd648a5eef2547789aad28cc56208de532e81c026e49085bd
ed21842cecc22eb9e48f31d8249b3f22afaa5bdd5552c99e191c3061463074977eefd5afde7bf5de53d5ddcf5e26d4bbc05c1096f6fcfa9d9aefe174ce16248d
7afeb3d9a4d2f13d2151ba4094a5b8e76fb0f03fbbf7eb5fdd454732c609f6403e1547a8e7c752ae8eaa5531876124eeb0154ee1bb25e30992f0caa3ea82a34b
d09bd06aa3566b55134452df4b51026a1f2f97648ebd9952e9dfdb2a1f53784da5500373caa74a35b6243476715e5708b11143cabd0b447b3eccb3609733fc52
fa1e4542c2173dbfa6fffceabdbb5574940b517940d6909be8bf5c2e17589c37f49c3c3a2b260d823068f50bfd1a40e53e6edc1eb7c6ad429f06a0f91c569a71
b175b61bc320c71aa0ecd1a17bd41e35eb16ded0dfdce3dc0fd5c7c26b50a63fd8c34f2643b0a285d7a00c1feee1c3417730b2f56b50866fede1dbb5fe28685b
fa3528a6243ddf43d7c25673b85d6d0159327aec8477c360d26ee4ca4b144443115d6a8a254be5a1584bd00bc6270050408a24493db959e1259a43140f112567
9c7827248a21f056286502866b8ddaa4d684ffea13e827ed5174849121ad780113b137a4f87862cec94af6fc07a0d537206f7ffef9cdeb1fdfbcfee9cd575fbd
79fdf77c6eadca923b466964cafdf2dd1ffef3cd6fbd7ffff0ed2f5fff319b7a172f4cfcbbbffdeedd3ffef93ef5b0e2d2146ffff4fdbb1fbf7ffbe7dfffebaf
5f3bb4f7393a33e1339260e13dc297de5396c0021dfcf119bf9ec42c46c494e8a791402952b338f48f656ca11f6d10450edc00db767cce21d5b880f7d72f2cc2
d398af2571687c182716f094313a60dc6985876a2ec3ccb3751ab927e76b13f714a10bd7dc43945a5e1eaf579063894be530c616cd2714a5124538c5d253dfb1
738c1dabfb8210cbaea764ce99604be97d41bc01224e93ccc899154da5d03149c02f1b1741f0b7659bd3e7de8051d7aa47f8c246c2de40d4417e86a965c6fb68
2d51e252394309350d7e8264ec2239ddf0b9891b0b099e8e3065de78818570c93ce6b05ec3e90f21cdb8dd7e4a37898de4929cbb749e20c64ce4889d0f6394ac
5cd829496313fbb938871045de13265df05366ef10f50e7e40e941773f27d872f787b3c133c8b026a53240d4376beef0e57dccacf89d6ee8126157aae9f3c44a
b17d4e9cd131584756689f604cd1255a60ec3dfbdcc160c05696cd4bd20f62c82ac7d815580f901dabea3dc5027a25d5dcece7c91322ac909de2881de073bad9
493c1b9426881fd2fc08bc6eda7c0ca52e7105c0633a3f37818f08f480102f4ea33c16a0c308ee835a9fc4c82a60ea5db8e375c32dff5d658fc1be7c61d1b8c2
be04197c6d1948eca6cc7b6d3343d49aa00c9819822ec3956e41c4727f29a28aab165b3be596f6a62ddd00dd91d5f42424fd6007b4d3fb84ffbbde073a8cb77f
f9c6b10f3e4ebfe3566c25ab6b763a8792c9f14e7f7308b7dbd50c195f904fbfa919a175fa04431dd9cf58b73dcd6d4fe3ffdff73487f6f36d2773a8dfb8ed64
7ce8306e3b99fc70e5e3743265f3027d8d3af0c80e7af4b14f72f0d46749289dca0dc527421ffc08f83db398c0a092d3279eb838055cc5f0a8ca1c4c60e1228e
b48cc799fc0d91f134462b381daafb4a492472d591f0564cc0a1911e76ea5678ba4e4ed9223becacd7d5c16656590592e5782d2cc6e1a04a66e856bb3cc02bd4
6bb6913e68dd1250b2d721614c6693683a48b4b783ca48fa58178ce620a157f65158741d2c3a4afdd6557b2c805ae115f8c1edc1cff49e1f06200242701e07cd
f942f92973f5d6bbda991fd3d3878c69450034d8db08283ddd555c0f2e4fad2e0bb52b78da2261849b4d425b46377822869fc17974aad1abd0b8aeafbba54b2d
7aca147a3e08ad9246bbf33e1637f535c8ede6069a9a9982a6de65cf6f35430899395af5fc251c1ac363b282d811ea3717a211dcbccc25cf36fc4d32cb8a0b39
4222ce0cae934e960d122231f728497abe5a7ee1069aea1ca2b9d51b90103e59725d482b9f1a3970baed64bc5ce2b934dd6e8c284b67af90e1b35ce1fc568bdf
1cac24d91adc3d8d1797de195df3a708422c6cd795011744c0dd413db3e682c0655891c8caf8db294c79da356fa3740c65e388ae62945714339967709dca0b3a
faadb081f196af190c6a98242f8467912ab0a651ad6a5a548d8cc3c1aafb6121653923699635d3ca2aaa6abab39835c3b60cecd8f26645de60b53531e434b3c2
67a97b37e576b7b96ea74f28aa0418bcb09fa3ea5ea12018d4cac92c6a8af17e1a56393b1fb56bc776811fa07695226164fdd656ed8edd8a1ae19c0e066f54f9
416e376a6168b9ed2bb5a5f5adb979b1cdce5e40f2184197bba6526857c2c92e47d0104d754f92a50dd8222f65be35e0c95b73d2f3bfac85fd60d80887955a27
1c57826650ab74c27eb3d20fc3667d1cd66ba341e31514161927f530bbb19fc00506dde4f7f67a7cefee3ed9ded1dc99b3a4caf4dd7c5513d777f7f5c6e1bb7b
8f40d2f9b2d598749bdd41abd26df627956034e854bac3d6a0326a0ddba3c9681876ba9357be77a1c141bf390c5ae34ea5551f0e2b41aba6e877ba9576d068f4
8376bf330efaaff23606569ea58fdc16605ecdebde7f010000ffff0300504b0304140006000800000021000dd1909fb60000001b010000270000007468656d65
2f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73848f4d0ac2301484f78277086f6fd3ba109126dd88d0add40384e4350d36
3f2451eced0dae2c082e8761be9969bb979dc9136332de3168aa1a083ae995719ac16db8ec8e4052164e89d93b64b060828e6f37ed1567914b284d262452282e
3198720e274a939cd08a54f980ae38a38f56e422a3a641c8bbd048f7757da0f19b017cc524bd62107bd5001996509affb3fd381a89672f1f165dfe514173d985
0528a2c6cce0239baa4c04ca5bbabac4df000000ffff0300504b01022d0014000600080000002100e9de0fbfff0000001c020000130000000000000000000000
0000000000005b436f6e74656e745f54797065735d2e786d6c504b01022d0014000600080000002100a5d6a7e7c0000000360100000b00000000000000000000
000000300100005f72656c732f2e72656c73504b01022d00140006000800000021006b799616830000008a0000001c0000000000000000000000000019020000
7468656d652f7468656d652f7468656d654d616e616765722e786d6c504b01022d0014000600080000002100b6f4679893070000c92000001600000000000000
000000000000d60200007468656d652f7468656d652f7468656d65312e786d6c504b01022d00140006000800000021000dd1909fb60000001b01000027000000
000000000000000000009d0a00007468656d652f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73504b050600000000050005005d010000980b00000000}
{\*\colorschememapping 3c3f786d6c2076657273696f6e3d22312e302220656e636f64696e673d225554462d3822207374616e64616c6f6e653d22796573223f3e0d0a3c613a636c724d
617020786d6c6e733a613d22687474703a2f2f736368656d61732e6f70656e786d6c666f726d6174732e6f72672f64726177696e676d6c2f323030362f6d6169
6e22206267313d226c743122207478313d22646b3122206267323d226c743222207478323d22646b322220616363656e74313d22616363656e74312220616363
656e74323d22616363656e74322220616363656e74333d22616363656e74332220616363656e74343d22616363656e74342220616363656e74353d22616363656e74352220616363656e74363d22616363656e74362220686c696e6b3d22686c696e6b2220666f6c486c696e6b3d22666f6c486c696e6b222f3e}
{\*\latentstyles\lsdstimax376\lsdlockeddef0\lsdsemihiddendef0\lsdunhideuseddef0\lsdqformatdef0\lsdprioritydef99{\lsdlockedexcept \lsdqformat1 \lsdpriority0 \lsdlocked0 Normal;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 1;
\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 2;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 3;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 4;
\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 5;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 6;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 7;
\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 8;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 1;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 5;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 9;
\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 1;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 2;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 3;
\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 4;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 5;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 6;
\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 7;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 8;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal Indent;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 header;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footer;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index heading;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority35 \lsdlocked0 caption;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of figures;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope return;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation reference;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 line number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 page number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote text;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of authorities;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 macro;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 toa heading;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 3;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 3;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 3;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 5;\lsdqformat1 \lsdpriority10 \lsdlocked0 Title;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Closing;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Signature;\lsdsemihidden1 \lsdunhideused1 \lsdpriority1 \lsdlocked0 Default Paragraph Font;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 4;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Message Header;\lsdqformat1 \lsdpriority11 \lsdlocked0 Subtitle;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Salutation;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Date;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Note Heading;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 3;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Block Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 FollowedHyperlink;\lsdqformat1 \lsdpriority22 \lsdlocked0 Strong;
\lsdqformat1 \lsdpriority20 \lsdlocked0 Emphasis;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Document Map;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Plain Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 E-mail Signature;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Top of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Bottom of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal (Web);\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Acronym;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Cite;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Code;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Definition;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Keyboard;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Preformatted;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Sample;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Typewriter;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Variable;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal Table;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation subject;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 No List;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 1;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 2;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 2;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 3;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 2;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 6;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 2;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 6;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 2;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Contemporary;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Elegant;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Professional;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Subtle 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Subtle 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 2;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Balloon Text;\lsdpriority39 \lsdlocked0 Table Grid;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Theme;\lsdsemihidden1 \lsdlocked0 Placeholder Text;
\lsdqformat1 \lsdpriority1 \lsdlocked0 No Spacing;\lsdpriority60 \lsdlocked0 Light Shading;\lsdpriority61 \lsdlocked0 Light List;\lsdpriority62 \lsdlocked0 Light Grid;\lsdpriority63 \lsdlocked0 Medium Shading 1;\lsdpriority64 \lsdlocked0 Medium Shading 2;
\lsdpriority65 \lsdlocked0 Medium List 1;\lsdpriority66 \lsdlocked0 Medium List 2;\lsdpriority67 \lsdlocked0 Medium Grid 1;\lsdpriority68 \lsdlocked0 Medium Grid 2;\lsdpriority69 \lsdlocked0 Medium Grid 3;\lsdpriority70 \lsdlocked0 Dark List;
\lsdpriority71 \lsdlocked0 Colorful Shading;\lsdpriority72 \lsdlocked0 Colorful List;\lsdpriority73 \lsdlocked0 Colorful Grid;\lsdpriority60 \lsdlocked0 Light Shading Accent 1;\lsdpriority61 \lsdlocked0 Light List Accent 1;
\lsdpriority62 \lsdlocked0 Light Grid Accent 1;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 1;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 1;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 1;\lsdsemihidden1 \lsdlocked0 Revision;
\lsdqformat1 \lsdpriority34 \lsdlocked0 List Paragraph;\lsdqformat1 \lsdpriority29 \lsdlocked0 Quote;\lsdqformat1 \lsdpriority30 \lsdlocked0 Intense Quote;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 1;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 1;
\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 1;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 1;\lsdpriority70 \lsdlocked0 Dark List Accent 1;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 1;\lsdpriority72 \lsdlocked0 Colorful List Accent 1;
\lsdpriority73 \lsdlocked0 Colorful Grid Accent 1;\lsdpriority60 \lsdlocked0 Light Shading Accent 2;\lsdpriority61 \lsdlocked0 Light List Accent 2;\lsdpriority62 \lsdlocked0 Light Grid Accent 2;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 2;
\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 2;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 2;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 2;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 2;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 2;
\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 2;\lsdpriority70 \lsdlocked0 Dark List Accent 2;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 2;\lsdpriority72 \lsdlocked0 Colorful List Accent 2;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 2;
\lsdpriority60 \lsdlocked0 Light Shading Accent 3;\lsdpriority61 \lsdlocked0 Light List Accent 3;\lsdpriority62 \lsdlocked0 Light Grid Accent 3;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 3;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 3;
\lsdpriority65 \lsdlocked0 Medium List 1 Accent 3;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 3;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 3;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 3;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 3;
\lsdpriority70 \lsdlocked0 Dark List Accent 3;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 3;\lsdpriority72 \lsdlocked0 Colorful List Accent 3;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 3;\lsdpriority60 \lsdlocked0 Light Shading Accent 4;
\lsdpriority61 \lsdlocked0 Light List Accent 4;\lsdpriority62 \lsdlocked0 Light Grid Accent 4;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 4;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 4;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 4;
\lsdpriority66 \lsdlocked0 Medium List 2 Accent 4;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 4;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 4;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 4;\lsdpriority70 \lsdlocked0 Dark List Accent 4;
\lsdpriority71 \lsdlocked0 Colorful Shading Accent 4;\lsdpriority72 \lsdlocked0 Colorful List Accent 4;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 4;\lsdpriority60 \lsdlocked0 Light Shading Accent 5;\lsdpriority61 \lsdlocked0 Light List Accent 5;
\lsdpriority62 \lsdlocked0 Light Grid Accent 5;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 5;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 5;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 5;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 5;
\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 5;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 5;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 5;\lsdpriority70 \lsdlocked0 Dark List Accent 5;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 5;
\lsdpriority72 \lsdlocked0 Colorful List Accent 5;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 5;\lsdpriority60 \lsdlocked0 Light Shading Accent 6;\lsdpriority61 \lsdlocked0 Light List Accent 6;\lsdpriority62 \lsdlocked0 Light Grid Accent 6;
\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 6;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 6;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 6;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 6;
\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 6;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 6;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 6;\lsdpriority70 \lsdlocked0 Dark List Accent 6;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 6;
\lsdpriority72 \lsdlocked0 Colorful List Accent 6;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 6;\lsdqformat1 \lsdpriority19 \lsdlocked0 Subtle Emphasis;\lsdqformat1 \lsdpriority21 \lsdlocked0 Intense Emphasis;
\lsdqformat1 \lsdpriority31 \lsdlocked0 Subtle Reference;\lsdqformat1 \lsdpriority32 \lsdlocked0 Intense Reference;\lsdqformat1 \lsdpriority33 \lsdlocked0 Book Title;\lsdsemihidden1 \lsdunhideused1 \lsdpriority37 \lsdlocked0 Bibliography;
\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority39 \lsdlocked0 TOC Heading;\lsdpriority41 \lsdlocked0 Plain Table 1;\lsdpriority42 \lsdlocked0 Plain Table 2;\lsdpriority43 \lsdlocked0 Plain Table 3;\lsdpriority44 \lsdlocked0 Plain Table 4;
\lsdpriority45 \lsdlocked0 Plain Table 5;\lsdpriority40 \lsdlocked0 Grid Table Light;\lsdpriority46 \lsdlocked0 Grid Table 1 Light;\lsdpriority47 \lsdlocked0 Grid Table 2;\lsdpriority48 \lsdlocked0 Grid Table 3;\lsdpriority49 \lsdlocked0 Grid Table 4;
\lsdpriority50 \lsdlocked0 Grid Table 5 Dark;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 1;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 1;
\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 1;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 1;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 1;
\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 1;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 2;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 2;
\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 2;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 2;
\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 3;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 3;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 3;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 3;
\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 3;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 3;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 4;
\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 4;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 4;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 4;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 4;
\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 4;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 5;
\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 5;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 5;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 5;
\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 5;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 6;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 6;
\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 6;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 6;
\lsdpriority46 \lsdlocked0 List Table 1 Light;\lsdpriority47 \lsdlocked0 List Table 2;\lsdpriority48 \lsdlocked0 List Table 3;\lsdpriority49 \lsdlocked0 List Table 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark;
\lsdpriority51 \lsdlocked0 List Table 6 Colorful;\lsdpriority52 \lsdlocked0 List Table 7 Colorful;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 1;\lsdpriority47 \lsdlocked0 List Table 2 Accent 1;\lsdpriority48 \lsdlocked0 List Table 3 Accent 1;
\lsdpriority49 \lsdlocked0 List Table 4 Accent 1;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 1;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 1;
\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 List Table 2 Accent 2;\lsdpriority48 \lsdlocked0 List Table 3 Accent 2;\lsdpriority49 \lsdlocked0 List Table 4 Accent 2;
\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 2;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 3;
\lsdpriority47 \lsdlocked0 List Table 2 Accent 3;\lsdpriority48 \lsdlocked0 List Table 3 Accent 3;\lsdpriority49 \lsdlocked0 List Table 4 Accent 3;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 3;
\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 3;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 4;\lsdpriority47 \lsdlocked0 List Table 2 Accent 4;
\lsdpriority48 \lsdlocked0 List Table 3 Accent 4;\lsdpriority49 \lsdlocked0 List Table 4 Accent 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 4;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 4;
\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 List Table 2 Accent 5;\lsdpriority48 \lsdlocked0 List Table 3 Accent 5;
\lsdpriority49 \lsdlocked0 List Table 4 Accent 5;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 5;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 5;
\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 List Table 2 Accent 6;\lsdpriority48 \lsdlocked0 List Table 3 Accent 6;\lsdpriority49 \lsdlocked0 List Table 4 Accent 6;
\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Mention;
\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hashtag;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Unresolved Mention;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Link;}}{\*\datastore 01050000
02000000180000004d73786d6c322e534158584d4c5265616465722e362e3000000000000000000000060000
d0cf11e0a1b11ae1000000000000000000000000000000003e000300feff090006000000000000000000000001000000010000000000000000100000feffffff00000000feffffff0000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
fffffffffffffffffdfffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffffffffffffffffff52006f006f007400200045006e00740072007900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000500ffffffffffffffffffffffff0c6ad98892f1d411a65f0040963251e5000000000000000000000000d0af
77916744d801feffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000105000000000000}}

View File

@@ -0,0 +1,13 @@
A Text Extraction Test Document
for
DSpace
This is a text. For the next sixty seconds this software will conduct a test of the DSpace text extraction facility. This is only a text.
This is a paragraph that followed the first that lived in the document that Jack built.
Lorem ipsum dolor sit amet. The quick brown fox jumped over the lazy dog. Yow! Are we having fun yet?
This has been a test of the DSpace text extraction system. In the event of actual content you would care what is written here.
Tip o' the hat to the U.S. Emergency Broadcast System for the format that I have irreverently borrowed.

View File

@@ -45,6 +45,13 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<version>${spring-boot.version}</version>
<exclusions>
<!-- Later version provided by dspace-api -->
<exclusion>
<groupId>org.hibernate.validator</groupId>
<artifactId>hibernate-validator</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>

View File

@@ -12,14 +12,11 @@ import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.validator.routines.UrlValidator;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
@@ -35,7 +32,6 @@ import org.dspace.app.iiif.model.generator.ContentAsTextGenerator;
import org.dspace.app.iiif.model.generator.ManifestGenerator;
import org.dspace.app.iiif.model.generator.SearchResultGenerator;
import org.dspace.app.iiif.service.utils.IIIFUtils;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -66,9 +62,6 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
@Autowired
SearchResultGenerator searchResult;
@Autowired
SolrSearchCore solrSearchCore;
@Autowired
ManifestGenerator manifestGenerator;
@@ -167,26 +160,49 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
private String getAnnotationList(UUID uuid, String json, String query) {
searchResult.setIdentifier(manifestId + "/search?q="
+ URLEncoder.encode(query, StandardCharsets.UTF_8));
GsonBuilder builder = new GsonBuilder();
Gson gson = builder.create();
JsonObject body = gson.fromJson(json, JsonObject.class);
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
// If error occurred or no body, return immediately
if (body == null) {
log.warn("Unable to process json response.");
return utils.asJson(searchResult.generateResource());
}
// outer ocr highlight element
JsonObject highs = body.getAsJsonObject("ocrHighlighting");
// highlight entries
for (Map.Entry<String, JsonElement> ocrIds: highs.entrySet()) {
// ocr_text
JsonObject ocrObj = ocrIds.getValue().getAsJsonObject().getAsJsonObject("ocr_text");
// snippets array
if (ocrObj != null) {
for (JsonElement snippetArray : ocrObj.getAsJsonObject().get("snippets").getAsJsonArray()) {
String pageId = getCanvasId(snippetArray.getAsJsonObject().get("pages"));
for (JsonElement highlights : snippetArray.getAsJsonObject().getAsJsonArray("highlights")) {
for (JsonElement highlight : highlights.getAsJsonArray()) {
searchResult.addResource(getAnnotation(highlight, pageId, uuid));
// Example structure of Solr response available at
// https://github.com/dbmdz/solr-ocrhighlighting/blob/main/docs/query.md
// Get the outer ocrHighlighting node
JsonNode highs = body.get("ocrHighlighting");
if (highs != null) {
// Loop through each highlight entry under ocrHighlighting
for (final JsonNode highEntry : highs) {
// Get the ocr_text node under the entry
JsonNode ocrNode = highEntry.get("ocr_text");
if (ocrNode != null) {
// Loop through the snippets array under that
for (final JsonNode snippet : ocrNode.get("snippets")) {
if (snippet != null) {
// Get a canvas ID based on snippet's pages
String pageId = getCanvasId(snippet.get("pages"));
if (pageId != null) {
// Loop through array of highlights for each snippet.
for (final JsonNode highlights : snippet.get("highlights")) {
if (highlights != null) {
// May be multiple word highlights on a page, so loop through them.
for (int i = 0; i < highlights.size(); i++) {
// Add annotation associated with each highlight
AnnotationGenerator anno = getAnnotation(highlights.get(i), pageId, uuid);
if (anno != null) {
searchResult.addResource(anno);
}
}
}
}
}
}
}
}
@@ -198,22 +214,25 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
/**
* Returns the annotation generator for the highlight.
* @param highlight highlight element from solor response
* @param highlight highlight node from Solr response
* @param pageId page id from solr response
* @return generator for a single annotation
*/
private AnnotationGenerator getAnnotation(JsonElement highlight, String pageId, UUID uuid) {
JsonObject hcoords = highlight.getAsJsonObject();
String text = (hcoords.get("text").getAsString());
int ulx = hcoords.get("ulx").getAsInt();
int uly = hcoords.get("uly").getAsInt();
int lrx = hcoords.get("lrx").getAsInt();
int lry = hcoords.get("lry").getAsInt();
String w = Integer.toString(lrx - ulx);
String h = Integer.toString(lry - uly);
private AnnotationGenerator getAnnotation(JsonNode highlight, String pageId, UUID uuid) {
String text = highlight.get("text") != null ? highlight.get("text").asText() : null;
int ulx = highlight.get("ulx") != null ? highlight.get("ulx").asInt() : -1;
int uly = highlight.get("uly") != null ? highlight.get("uly").asInt() : -1;
int lrx = highlight.get("lrx") != null ? highlight.get("lrx").asInt() : -1;
int lry = highlight.get("lry") != null ? highlight.get("lry").asInt() : -1;
String w = (lrx >= 0 && ulx >= 0) ? Integer.toString(lrx - ulx) : null;
String h = (lry >= 0 && uly >= 0) ? Integer.toString(lry - uly) : null;
if (text != null && w != null && h != null) {
String params = ulx + "," + uly + "," + w + "," + h;
return createSearchResultAnnotation(params, text, pageId, uuid);
}
return null;
}
/**
* Returns position of canvas. Uses the "pages" id attribute.
@@ -221,16 +240,23 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
* delimited with a "." and that the integer corresponds to the
* canvas identifier in the manifest. For METS/ALTO documents, the page
* order can be derived from the METS file when loading the solr index.
* @param element the pages element
* @return canvas id
* @param pagesNode the pages node
* @return canvas id or null if node was null
*/
private String getCanvasId(JsonElement element) {
JsonArray pages = element.getAsJsonArray();
JsonObject page = pages.get(0).getAsJsonObject();
String[] identArr = page.get("id").getAsString().split("\\.");
private String getCanvasId(JsonNode pagesNode) {
if (pagesNode != null) {
JsonNode page = pagesNode.get(0);
if (page != null) {
JsonNode pageId = page.get("id");
if (pageId != null) {
String[] identArr = pageId.asText().split("\\.");
// the canvas id.
return "c" + identArr[1];
}
}
}
return null;
}
/**
* Creates annotation with word highlight coordinates.
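
Taken together, the hunks above replace the Gson tree walk with Jackson's tree model and add a null check at every level of the response. As a rough illustration of the traversal only, here is a minimal, self-contained sketch run against a hand-written response shaped like the solr-ocrhighlighting example linked in the code; the sample JSON and the class name are invented for the demo and are not DSpace code, and the null guards of the production version are omitted because the sample is known-good.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class OcrHighlightTraversalDemo {
    public static void main(String[] args) throws Exception {
        // Invented sample, shaped like the solr-ocrhighlighting query docs.
        String json = "{\"ocrHighlighting\":{\"doc-1\":{\"ocr_text\":{\"snippets\":["
                + "{\"pages\":[{\"id\":\"alto.3\"}],\"highlights\":[[{"
                + "\"text\":\"dspace\",\"ulx\":10,\"uly\":20,\"lrx\":110,\"lry\":45}]]}]}}}}";
        JsonNode body = new ObjectMapper().readTree(json);
        // Iterating an object node yields its field values: one entry per matching document.
        for (JsonNode highEntry : body.get("ocrHighlighting")) {
            for (JsonNode snippet : highEntry.get("ocr_text").get("snippets")) {
                // Page id convention "<name>.<position>" maps to canvas "c<position>".
                String canvasId = "c" + snippet.get("pages").get(0).get("id").asText().split("\\.")[1];
                for (JsonNode highlights : snippet.get("highlights")) {
                    for (JsonNode h : highlights) {
                        int w = h.get("lrx").asInt() - h.get("ulx").asInt();
                        int hh = h.get("lry").asInt() - h.get("uly").asInt();
                        System.out.println(h.get("text").asText() + " on " + canvasId + ": "
                                + h.get("ulx").asInt() + "," + h.get("uly").asInt() + "," + w + "," + hh);
                    }
                }
            }
        }
    }
}

This prints "dspace on c3: 10,20,100,25", i.e. the same x,y,w,h parameter string the production code hands to createSearchResultAnnotation.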

View File

@@ -82,10 +82,6 @@
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</exclusion>
<exclusion>
<groupId>xml-apis</groupId>
<artifactId>xml-apis</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>

View File

@@ -22,6 +22,7 @@ import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
public class DSpaceResourceResolver implements ResourceResolver {
// Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions
private static final TransformerFactory transformerFactory = TransformerFactory
.newInstance("net.sf.saxon.TransformerFactoryImpl", null);

View File

@@ -19,6 +19,7 @@ import javax.xml.transform.stream.StreamSource;
import org.apache.commons.io.IOUtils;
public abstract class AbstractXSLTest {
// Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions
private static final TransformerFactory factory = TransformerFactory
.newInstance("net.sf.saxon.TransformerFactoryImpl", null);
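
Both of these classes pin Saxon explicitly instead of relying on the default JAXP TransformerFactory lookup, because the JDK's built-in XSLT processor only implements XSLT 1.0 and fails to compile stylesheets that use XSLT/XPath 2.0 functions. A minimal standalone sketch of what the pinned factory buys (demo code assuming Saxon-HE on the classpath; not part of DSpace):

import java.io.StringReader;
import java.io.StringWriter;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;

public class SaxonXslt2Demo {
    public static void main(String[] args) throws Exception {
        // Same pinning as above; the JDK default factory would reject upper-case(),
        // an XPath 2.0 function.
        TransformerFactory factory =
                TransformerFactory.newInstance("net.sf.saxon.TransformerFactoryImpl", null);
        String xslt = "<xsl:stylesheet version='2.0' "
                + "xmlns:xsl='http://www.w3.org/1999/XSL/Transform'>"
                + "<xsl:output method='text'/>"
                + "<xsl:template match='/'>"
                + "<xsl:value-of select='upper-case(/greeting)'/>"
                + "</xsl:template></xsl:stylesheet>";
        Transformer transformer = factory.newTransformer(new StreamSource(new StringReader(xslt)));
        StringWriter out = new StringWriter();
        transformer.transform(new StreamSource(new StringReader("<greeting>hello oai-pmh</greeting>")),
                new StreamResult(out));
        System.out.println(out); // HELLO OAI-PMH
    }
}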

View File

@@ -63,45 +63,11 @@
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
<version>${jersey.version}</version>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-module-jaxb-annotations</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-base</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-jaxb-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-base</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-jaxb-annotations</artifactId>
<version>${jackson.version}</version>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-jaxb</artifactId>
<version>${jersey.version}</version>
</dependency>
<!-- Spring dependencies -->
@@ -156,11 +122,6 @@
<groupId>jakarta.annotation</groupId>
<artifactId>jakarta.annotation-api</artifactId>
</exclusion>
<!-- Newer version provided by solr-cell -->
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -193,30 +154,9 @@
<version>${spring-security.version}</version>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>2.2.2</version>
</dependency>
<!-- Use DSpace, for now, an older version to minimize spring generated dependency on Discovery -->
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Connecting to DSpace datasource sets a dependency on Postgres DB-->

View File

@@ -22,7 +22,10 @@ The only tested way right now is to run this webapp inside your IDE (Eclipse). J
> dspace.dir = d:/install/dspace7
## HAL Browser
The modified version of the HAL Browser from the Spring Data REST project is included, the index.html file is overriden locally to support the /api baseURL (see [DATAREST-971](https://jira.spring.io/browse/DATAREST-971))
The modified version of the HAL Browser from https://github.com/mikekelly/hal-browser is included.
We've updated/customized the HAL Browser to integrate better with our authentication system, provide CSRF support, and use a more recent version of its dependencies.
## Packages and main classes
*[org.dspace.app.rest.Application](src/main/java/org/dspace/app/rest/Application.java)* is the Spring Boot main class; it initializes

View File

@@ -68,7 +68,9 @@
<!--Skip license check of third party files included/customized from HAL Browser -->
<exclude>src/main/webapp/index.html</exclude>
<exclude>src/main/webapp/login.html</exclude>
<exclude>src/main/webapp/styles.css</exclude>
<exclude>src/main/webapp/js/hal/**</exclude>
<exclude>src/main/webapp/js/vendor/**</exclude>
</excludes>
</configuration>
</plugin>
@@ -238,28 +240,18 @@
<dependencies>
<!-- These next two dependencies build a WAR that is BOTH executable
AND deployable into an external container (Tomcat).
See: http://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#build-tool-plugins-maven-packaging -->
<!-- NOTE: For rapid development (if you don't need Solr or other webapps),
you can temporarily comment these out, and switch <packaging> to "jar".
This lets you develop in a standalone, runnable JAR application. -->
<!--<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>-->
<!-- Ensure embedded servlet container doesn't interfere when this
WAR is deployed to an external Tomcat (i.e. provided). -->
<!--<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
<scope>provided</scope>
</dependency>-->
<!-- Spring Boot dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<version>${spring-boot.version}</version>
<exclusions>
<!-- Later version provided by dspace-api -->
<exclusion>
<groupId>org.hibernate.validator</groupId>
<artifactId>hibernate-validator</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -286,20 +278,24 @@
<version>0.4.6</version>
</dependency>
<!-- The HAL Browser -->
<!-- HAL Browser (via WebJars) : https://github.com/mikekelly/hal-browser -->
<!-- This is primarily used to pull in the HAL Browser core Javascript code ('js' folder), as we've overridden
many dependencies below and the HTML pages in src/main/webapp/ -->
<!-- NOTE: Eventually this should be replaced by the HAL Explorer included in Spring Data REST,
see https://github.com/DSpace/DSpace/issues/3017 -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-rest-hal-browser</artifactId>
<version>${spring-hal-browser.version}</version>
<groupId>org.webjars</groupId>
<artifactId>hal-browser</artifactId>
<version>ad9b865</version>
</dependency>
<!-- WebJars dependencies used to update/enhance the default HAL Browser -->
<!-- Pull in several WebJars dependencies used to update/enhance the default HAL Browser -->
<!-- Pull in current version of JQuery via WebJars
Made available at: webjars/jquery/dist/jquery.min.js -->
<dependency>
<groupId>org.webjars.bowergithub.jquery</groupId>
<artifactId>jquery-dist</artifactId>
<version>3.5.1</version>
<version>3.6.0</version>
</dependency>
<!-- Pull in current version of Toastr (toastrjs.com) via WebJars
Made available at: webjars/toastr/build/toastr.min.js -->
@@ -308,17 +304,46 @@
<artifactId>toastr</artifactId>
<version>2.1.4</version>
</dependency>
<!-- Also pull in current version of Bootstrap via WebJars. This is currently ONLY used by our OAI-PMH
interface. But, it is include here so that it's accessible to all interfaces enabled in server webapp.
<!-- Pull in current version of URI.js (https://medialize.github.io/URI.js/) via WebJars
Made available at: webjars/urijs/src/URI.min.js -->
<dependency>
<groupId>org.webjars.bowergithub.medialize</groupId>
<artifactId>uri.js</artifactId>
<version>1.19.10</version>
</dependency>
<!-- Pull in current version of Underscore.js (https://underscorejs.org/) via WebJars
Made available at: webjars/underscore/underscore-min.js -->
<dependency>
<groupId>org.webjars.bowergithub.jashkenas</groupId>
<artifactId>underscore</artifactId>
<version>1.13.2</version>
</dependency>
<!-- Pull in current version of Backbone.js (http://backbonejs.org/) via WebJars
Made available at: webjars/backbone/backbone-min.js -->
<dependency>
<groupId>org.webjars.bowergithub.jashkenas</groupId>
<artifactId>backbone</artifactId>
<version>1.4.1</version>
</dependency>
<!-- Pull in current version of json-editor.js (https://github.com/json-editor/json-editor) via WebJars
Made available at: webjars/json-editor__json-editor/2.6.1/dist/jsoneditor.js
(Required by js/vendor/CustomPostForm.js)
NOTE: Because the path contains the version, you MUST update index.html when updating this dependency -->
<dependency>
<groupId>org.webjars.npm</groupId>
<artifactId>json-editor__json-editor</artifactId>
<version>2.6.1</version>
</dependency>
<!-- Also pull in current version of Bootstrap via WebJars.
This is used by BOTH our HAL Browser and our OAI-PMH interface.
Made available at: webjars/bootstrap/dist/js/bootstrap.min.js and
webjars/bootstrap/dist/css/bootstrap.min.css -->
<dependency>
<groupId>org.webjars.bowergithub.twbs</groupId>
<artifactId>bootstrap</artifactId>
<version>4.5.2</version>
<version>4.6.1</version>
</dependency>
<!-- Add in Spring Security for AuthN and AuthZ -->
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -364,13 +389,12 @@
<artifactId>dspace-services</artifactId>
</dependency>
<!-- DSpace modules to deploy (these modules are all optional, but add features/endpoints to webapp) -->
<!-- You may choose to comment out any of these modules if you do not want/need its features -->
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-iiif</artifactId>
</dependency>
<!-- DSpace modules to deploy (this modules are all optional, but add features/endpoints to webapp) -->
<!-- You may choose to comment out any of these modules if you do not want/need its features -->
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-oai</artifactId>
@@ -416,6 +440,32 @@
<version>${nimbus-jose-jwt.version}</version>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-cache
Caching dependencies for iiif endpoint. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
<version>${spring-boot.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.cache/cache-api -->
<dependency>
<groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId>
<version>1.1.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.ehcache/ehcache -->
<dependency>
<groupId>org.ehcache</groupId>
<artifactId>ehcache</artifactId>
<version>${ehcache.version}</version>
</dependency>
<!-- TEST DEPENDENCIES -->
<dependency>
<groupId>org.springframework.boot</groupId>
@@ -431,13 +481,11 @@
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<version>${json-path.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path-assert</artifactId>
<version>${json-path.version}</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -488,50 +536,6 @@
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-cache
Caching dependencies for iiif endpoint. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
<version>${spring-boot.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.cache/cache-api -->
<dependency>
<groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId>
<version>1.1.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.ehcache/ehcache -->
<dependency>
<groupId>org.ehcache</groupId>
<artifactId>ehcache</artifactId>
<version>${ehcache.version}</version>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<scope>test</scope>
<exclusions>
<!-- Newer version brought in by opencsv -->
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId>
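
The spring-boot-starter-cache, cache-api, and ehcache entries added above wire Spring's caching abstraction into the webapp for the IIIF endpoint. A minimal sketch of the usage pattern those dependencies enable, using a hypothetical service rather than the actual IIIF code:

import org.springframework.cache.annotation.Cacheable;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Service;

// Turns on Spring's caching abstraction, which the JCache/Ehcache
// dependencies above can back with a real cache store.
@Configuration
@EnableCaching
class CachingConfig {
}

// Hypothetical service (not DSpace code): the first call per id does the
// expensive work, later calls with the same id are served from the cache.
@Service
class ManifestService {
    @Cacheable("manifests")
    public String getManifest(String id) {
        return expensiveManifestBuild(id);
    }

    private String expensiveManifestBuild(String id) {
        return "{\"@id\":\"" + id + "\"}";
    }
}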

View File

@@ -40,6 +40,7 @@ import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
@@ -192,13 +193,31 @@ public class Application extends SpringBootServletInitializer {
}
}
/**
* Add a ViewController for the root path, to load HAL Browser
* @param registry ViewControllerRegistry
*/
@Override
public void addViewControllers(ViewControllerRegistry registry) {
// Ensure accessing the root path will load the index.html of the HAL Browser
registry.addViewController("/").setViewName("forward:/index.html");
}
/**
* Add a new ResourceHandler to allow us to use WebJars.org to pull in web dependencies
* dynamically for HAL Browser, and access them off the /webjars path.
* dynamically for HAL Browser, etc.
* @param registry ResourceHandlerRegistry
*/
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
// First, "mount" the Hal Browser resources at the /browser path
// NOTE: the hal-browser directory uses the version of the Hal browser, so this needs to be synced
// with the org.webjars.hal-browser version in the POM
registry
.addResourceHandler("/browser/**")
.addResourceLocations("/webjars/hal-browser/ad9b865/");
// Make all other Webjars available off the /webjars path
registry
.addResourceHandler("/webjars/**")
.addResourceLocations("/webjars/");

View File

@@ -80,7 +80,7 @@ public class AuthenticationRestController implements InitializingBean {
@Override
public void afterPropertiesSet() {
discoverableEndpointsService
.register(this, Arrays.asList(new Link("/api/" + AuthnRest.CATEGORY, AuthnRest.NAME)));
.register(this, Arrays.asList(Link.of("/api/" + AuthnRest.CATEGORY, AuthnRest.NAME)));
}
@RequestMapping(method = RequestMethod.GET)
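
This hunk, like the DiscoveryRestController, IdentifierRestController, and OidcRestController changes below, tracks the Spring HATEOAS 1.x migration: the public constructors on Link, UriTemplate, and related types were replaced by static of(...) factory methods. A minimal before/after sketch (assuming spring-hateoas 1.x; the href and rel values are illustrative only):

import org.springframework.hateoas.Link;

public class LinkMigrationDemo {
    public static void main(String[] args) {
        // Spring HATEOAS 0.x style, removed in 1.x:
        //   Link link = new Link("/api/authn", "authn");
        // Spring HATEOAS 1.x factory method:
        Link link = Link.of("/api/authn", "authn");
        System.out.println(link.getHref() + " -> " + link.getRel());
    }
}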

View File

@@ -12,7 +12,6 @@ import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFI
import static org.springframework.web.bind.annotation.RequestMethod.PUT;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
@@ -22,7 +21,6 @@ import javax.ws.rs.core.Response;
import org.apache.catalina.connector.ClientAbortException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.converter.ConverterService;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
@@ -133,19 +131,12 @@ public class BitstreamRestController {
}
try {
long filesize;
if (citationDocumentService.isCitationEnabledForBitstream(bit, context)) {
final Pair<InputStream, Long> citedDocument = citationDocumentService.makeCitedDocument(context, bit);
filesize = citedDocument.getRight();
citedDocument.getLeft().close();
} else {
filesize = bit.getSizeBytes();
}
long filesize = bit.getSizeBytes();
Boolean citationEnabledForBitstream = citationDocumentService.isCitationEnabledForBitstream(bit, context);
HttpHeadersInitializer httpHeadersInitializer = new HttpHeadersInitializer()
.withBufferSize(BUFFER_SIZE)
.withFileName(name)
.withLength(filesize)
.withChecksum(bit.getChecksum())
.withMimetype(mimetype)
.with(request)
@@ -161,11 +152,9 @@ public class BitstreamRestController {
httpHeadersInitializer.withDisposition(HttpHeadersInitializer.CONTENT_DISPOSITION_ATTACHMENT);
}
org.dspace.app.rest.utils.BitstreamResource bitstreamResource =
new org.dspace.app.rest.utils.BitstreamResource(
bit, name, uuid, filesize, currentUser != null ? currentUser.getID() : null);
name, uuid, currentUser != null ? currentUser.getID() : null, citationEnabledForBitstream);
//We have all the data we need, close the connection to the database so that it doesn't stay open during
//download/streaming

View File

@@ -59,7 +59,7 @@ import org.springframework.web.multipart.MultipartFile;
* </pre>
*/
@RestController
@RequestMapping("/api/" + BundleRest.CATEGORY + "/" + BundleRest.PLURAL_NAME + "/"
@RequestMapping("/api/" + BundleRest.CATEGORY + "/" + BundleRest.PLURAL_NAME
+ REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID + "/" + BitstreamRest.PLURAL_NAME)
public class BundleUploadBitstreamController {
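
The trailing "/" comes off the mapping because REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID itself begins with a slash, so the old concatenation produced a double slash in the request path. A toy illustration of the concatenation (the UUID segment below is an assumed stand-in, not the real constant from RegexUtils):

public class MappingConcatDemo {
    // Assumed shape of the UUID path segment; the real pattern lives in RegexUtils.
    static final String UUID_SEGMENT = "/{uuid:[0-9a-f-]+}";

    public static void main(String[] args) {
        String before = "/api/core/bundles/" + UUID_SEGMENT + "/bitstreams";
        String after = "/api/core/bundles" + UUID_SEGMENT + "/bitstreams";
        System.out.println(before); // /api/core/bundles//{uuid:[0-9a-f-]+}/bitstreams (double slash)
        System.out.println(after);  // /api/core/bundles/{uuid:[0-9a-f-]+}/bitstreams
    }
}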

View File

@@ -73,7 +73,7 @@ public class DiscoveryRestController implements InitializingBean {
@Override
public void afterPropertiesSet() throws Exception {
discoverableEndpointsService
.register(this, Arrays.asList(new Link("/api/" + SearchResultsRest.CATEGORY, SearchResultsRest.CATEGORY)));
.register(this, Arrays.asList(Link.of("/api/" + SearchResultsRest.CATEGORY, SearchResultsRest.CATEGORY)));
}
@RequestMapping(method = RequestMethod.GET)

View File

@@ -66,8 +66,8 @@ public class IdentifierRestController implements InitializingBean {
discoverableEndpointsService
.register(this,
Arrays.asList(
new Link(
new UriTemplate("/api/" + CATEGORY + "/" + ACTION,
Link.of(
UriTemplate.of("/api/" + CATEGORY + "/" + ACTION,
new TemplateVariables(
new TemplateVariable(PARAM, VariableType.REQUEST_PARAM))),
CATEGORY)));

View File

@@ -45,7 +45,7 @@ public class OidcRestController {
@PostConstruct
public void afterPropertiesSet() {
discoverableEndpointsService.register(this, List.of(new Link("/api/" + AuthnRest.CATEGORY, "oidc")));
discoverableEndpointsService.register(this, List.of(Link.of("/api/" + AuthnRest.CATEGORY, "oidc")));
}
@RequestMapping(method = RequestMethod.GET)

Some files were not shown because too many files have changed in this diff.