Merge remote-tracking branch '4science-bitbucket/main' into CST-5249_suggestion

This commit is contained in:
Luca Giamminonni
2022-05-02 12:21:48 +02:00
233 changed files with 3453 additions and 2378 deletions

View File

@@ -373,7 +373,7 @@
<dependency> <dependency>
<groupId>org.hibernate.javax.persistence</groupId> <groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId> <artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Final</version> <version>1.0.2.Final</version>
</dependency> </dependency>
<dependency> <dependency>
@@ -394,7 +394,7 @@
<groupId>org.ow2.asm</groupId> <groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId> <artifactId>asm-commons</artifactId>
</exclusion> </exclusion>
<!-- Newer version of Bouncycastle brought in via solr-cell --> <!-- Newer version of Bouncycastle brought in via Tika -->
<exclusion> <exclusion>
<groupId>org.bouncycastle</groupId> <groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId> <artifactId>bcpkix-jdk15on</artifactId>
@@ -520,7 +520,7 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.jdom</groupId> <groupId>org.jdom</groupId>
<artifactId>jdom</artifactId> <artifactId>jdom2</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.pdfbox</groupId> <groupId>org.apache.pdfbox</groupId>
@@ -530,10 +530,6 @@
<groupId>org.apache.pdfbox</groupId> <groupId>org.apache.pdfbox</groupId>
<artifactId>fontbox</artifactId> <artifactId>fontbox</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-scratchpad</artifactId>
</dependency>
<dependency> <dependency>
<groupId>xalan</groupId> <groupId>xalan</groupId>
<artifactId>xalan</artifactId> <artifactId>xalan</artifactId>
@@ -581,9 +577,12 @@
</dependency> </dependency>
<!-- Used for RSS / ATOM syndication feeds --> <!-- Used for RSS / ATOM syndication feeds -->
<dependency> <dependency>
<groupId>org.rometools</groupId> <groupId>com.rometools</groupId>
<artifactId>rome</artifactId>
</dependency>
<dependency>
<groupId>com.rometools</groupId>
<artifactId>rome-modules</artifactId> <artifactId>rome-modules</artifactId>
<version>1.0</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.jbibtex</groupId> <groupId>org.jbibtex</groupId>
@@ -603,27 +602,13 @@
<artifactId>httpmime</artifactId> <artifactId>httpmime</artifactId>
</dependency> </dependency>
<!-- SolrJ is used to communicate with Solr throughout the dspace-api -->
<dependency> <dependency>
<groupId>org.apache.solr</groupId> <groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId> <artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version> <version>${solr.client.version}</version>
<exclusions>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) --> <!-- Solr Core is only needed for Integration Tests (to run a MockSolrServer) -->
<!-- The following Solr / Lucene dependencies also support integration tests --> <!-- The following Solr / Lucene dependencies also support integration tests -->
<dependency> <dependency>
<groupId>org.apache.solr</groupId> <groupId>org.apache.solr</groupId>
@@ -651,39 +636,10 @@
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<exclusions>
<!-- Newer version brought in by opencsv -->
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
<!-- Newer Jetty version brought in via Parent POM -->
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId> <artifactId>lucene-core</artifactId>
</dependency> </dependency>
<!-- Used for full-text indexing with Solr -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId> <artifactId>lucene-analyzers-icu</artifactId>
@@ -699,9 +655,15 @@
<artifactId>lucene-analyzers-stempel</artifactId> <artifactId>lucene-analyzers-stempel</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- Tika is used to extract full text from documents in order to index in Solr -->
<dependency> <dependency>
<groupId>org.apache.xmlbeans</groupId> <groupId>org.apache.tika</groupId>
<artifactId>xmlbeans</artifactId> <artifactId>tika-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers-standard-package</artifactId>
</dependency> </dependency>
<dependency> <dependency>
@@ -759,7 +721,7 @@
<dependency> <dependency>
<groupId>org.flywaydb</groupId> <groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId> <artifactId>flyway-core</artifactId>
<version>6.5.7</version> <version>8.4.4</version>
</dependency> </dependency>
<!-- Google Analytics --> <!-- Google Analytics -->
@@ -815,44 +777,6 @@
<artifactId>jaxb-runtime</artifactId> <artifactId>jaxb-runtime</artifactId>
</dependency> </dependency>
<!-- Apache Axiom -->
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-impl</artifactId>
<version>${axiom.version}</version>
<exclusions>
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
<exclusion>
<groupId>org.codehaus.woodstox</groupId>
<artifactId>woodstox-core-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-api</artifactId>
<version>${axiom.version}</version>
<exclusions>
<!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
<exclusion>
<groupId>org.apache.geronimo.specs</groupId>
<artifactId>*</artifactId>
</exclusion>
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
<exclusion>
<groupId>org.codehaus.woodstox</groupId>
<artifactId>woodstox-core-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Jersey / JAX-RS client (javax.ws.rs.*) dependencies needed to integrate with external sources/services --> <!-- Jersey / JAX-RS client (javax.ws.rs.*) dependencies needed to integrate with external sources/services -->
<dependency> <dependency>
<groupId>org.glassfish.jersey.core</groupId> <groupId>org.glassfish.jersey.core</groupId>
@@ -948,13 +872,6 @@
<version>2.0.0</version> <version>2.0.0</version>
</dependency> </dependency>
<dependency>
<groupId>com.github.stefanbirkner</groupId>
<artifactId>system-rules</artifactId>
<version>1.19.0</version>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.mock-server</groupId> <groupId>org.mock-server</groupId>
<artifactId>mockserver-junit-rule</artifactId> <artifactId>mockserver-junit-rule</artifactId>

View File

@@ -52,9 +52,9 @@ import org.dspace.content.service.CommunityService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.output.Format; import org.jdom2.output.Format;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
import org.w3c.dom.Document; import org.w3c.dom.Document;
import org.w3c.dom.Node; import org.w3c.dom.Node;
import org.w3c.dom.NodeList; import org.w3c.dom.NodeList;
@@ -307,7 +307,7 @@ public class StructBuilder {
} }
// finally write the string into the output file. // finally write the string into the output file.
final org.jdom.Document xmlOutput = new org.jdom.Document(root); final org.jdom2.Document xmlOutput = new org.jdom2.Document(root);
try { try {
new XMLOutputter().output(xmlOutput, output); new XMLOutputter().output(xmlOutput, output);
} catch (IOException e) { } catch (IOException e) {
@@ -411,7 +411,7 @@ public class StructBuilder {
} }
// Now write the structure out. // Now write the structure out.
org.jdom.Document xmlOutput = new org.jdom.Document(rootElement); org.jdom2.Document xmlOutput = new org.jdom2.Document(rootElement);
try { try {
XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat()); XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
outputter.output(xmlOutput, output); outputter.output(xmlOutput, output);

View File

@@ -41,10 +41,8 @@ public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> ext
Options options = new Options(); Options options = new Options();
options.addOption("m", "metadata", true, "metadata field name"); options.addOption("m", "metadata", true, "metadata field name");
options.getOption("m").setType(String.class);
options.addOption("l", "list", false, "lists the metadata fields that can be deleted"); options.addOption("l", "list", false, "lists the metadata fields that can be deleted");
options.getOption("l").setType(boolean.class);
super.options = options; super.options = options;
} }

View File

@@ -54,12 +54,9 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
Options options = new Options(); Options options = new Options();
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)"); options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.getOption("i").setType(String.class);
options.addOption("a", "all", false, options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)"); "include all metadata fields that are not normally changed (e.g. provenance)");
options.getOption("a").setType(boolean.class);
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options; super.options = options;

View File

@@ -19,7 +19,6 @@ public class MetadataImportCliScriptConfiguration extends MetadataImportScriptCo
public Options getOptions() { public Options getOptions() {
Options options = super.getOptions(); Options options = super.getOptions();
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)"); options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true); options.getOption("e").setRequired(true);
super.options = options; super.options = options;
return options; return options;

View File

@@ -59,20 +59,14 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
options.getOption("f").setRequired(true); options.getOption("f").setRequired(true);
options.addOption("s", "silent", false, options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION"); "silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.getOption("s").setType(boolean.class);
options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow"); options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow");
options.getOption("w").setType(boolean.class);
options.addOption("n", "notify", false, options.addOption("n", "notify", false,
"notify - when adding new items using a workflow, send notification emails"); "notify - when adding new items using a workflow, send notification emails");
options.getOption("n").setType(boolean.class);
options.addOption("v", "validate-only", false, options.addOption("v", "validate-only", false,
"validate - just validate the csv, don't run the import"); "validate - just validate the csv, don't run the import");
options.getOption("v").setType(boolean.class);
options.addOption("t", "template", false, options.addOption("t", "template", false,
"template - when adding new items, use the collection template (if it exists)"); "template - when adding new items, use the collection template (if it exists)");
options.getOption("t").setType(boolean.class);
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options; super.options = options;
} }

View File

@@ -43,22 +43,14 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
public Options getOptions() { public Options getOptions() {
Options options = new Options(); Options options = new Options();
options.addOption("p", "purge", false, "delete all items in the collection"); options.addOption("p", "purge", false, "delete all items in the collection");
options.getOption("p").setType(boolean.class);
options.addOption("r", "run", false, "run the standard harvest procedure"); options.addOption("r", "run", false, "run the standard harvest procedure");
options.getOption("r").setType(boolean.class);
options.addOption("g", "ping", false, "test the OAI server and set"); options.addOption("g", "ping", false, "test the OAI server and set");
options.getOption("g").setType(boolean.class);
options.addOption("s", "setup", false, "Set the collection up for harvesting"); options.addOption("s", "setup", false, "Set the collection up for harvesting");
options.getOption("s").setType(boolean.class);
options.addOption("S", "start", false, "start the harvest loop"); options.addOption("S", "start", false, "start the harvest loop");
options.getOption("S").setType(boolean.class);
options.addOption("R", "reset", false, "reset harvest status on all collections"); options.addOption("R", "reset", false, "reset harvest status on all collections");
options.getOption("R").setType(boolean.class);
options.addOption("P", "purgeCollections", false, "purge all harvestable collections"); options.addOption("P", "purgeCollections", false, "purge all harvestable collections");
options.getOption("P").setType(boolean.class);
options.addOption("o", "reimport", false, "reimport all items in the collection, " + options.addOption("o", "reimport", false, "reimport all items in the collection, " +
"this is equivalent to -p -r, purging all items in a collection and reimporting them"); "this is equivalent to -p -r, purging all items in a collection and reimporting them");
options.getOption("o").setType(boolean.class);
options.addOption("c", "collection", true, options.addOption("c", "collection", true,
"harvesting collection (handle or id)"); "harvesting collection (handle or id)");
options.addOption("t", "type", true, options.addOption("t", "type", true,
@@ -72,7 +64,6 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
"crosswalk in dspace.cfg"); "crosswalk in dspace.cfg");
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
return options; return options;
} }

View File

@@ -16,7 +16,7 @@ import java.io.StreamTokenizer;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.jdom.Document; import org.jdom2.Document;
/** /**
* @author mwood * @author mwood

View File

@@ -29,9 +29,9 @@ import org.dspace.scripts.service.ScriptService;
import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit; import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.RequestService; import org.dspace.services.RequestService;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
/** /**
* A DSpace script launcher. * A DSpace script launcher.

View File

@@ -1,99 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFExcelExtractor;
import org.dspace.content.Item;
/*
* ExcelFilter
*
* Entries you must add to dspace.cfg:
*
* filter.plugins = blah, \
* Excel Text Extractor
*
* plugin.named.org.dspace.app.mediafilter.FormatFilter = \
* blah = blah, \
* org.dspace.app.mediafilter.ExcelFilter = Excel Text Extractor
*
* #Configure each filter's input Formats
* filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML
*
*/
public class ExcelFilter extends MediaFilter {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);

    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     */
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract the plain text of an Excel workbook as a UTF-8 stream.
     *
     * @param item    item
     * @param source  source input stream
     * @param verbose verbose mode
     * @return InputStream the resulting input stream, or null if no text was extracted
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
        throws Exception {
        String extractedText = null;

        try {
            // Let POI choose the right extractor for the workbook format it detects.
            POITextExtractor workbookExtractor = ExtractorFactory.createExtractor(source);

            // Legacy (.xls, ExcelExtractor) and OOXML (.xlsx, XSSFExcelExtractor)
            // workbooks are handled identically: just take the extractor's text.
            if (workbookExtractor instanceof ExcelExtractor
                || workbookExtractor instanceof XSSFExcelExtractor) {
                extractedText = workbookExtractor.getText();
            }
        } catch (Exception e) {
            log.error("Error filtering bitstream: " + e.getMessage(), e);
            throw e;
        }

        if (extractedText == null) {
            // Unrecognized extractor type: nothing to return.
            return null;
        }

        // Wrap the extracted text in a UTF-8 byte stream for the caller.
        return IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8);
    }
}

View File

@@ -1,82 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import javax.swing.text.Document;
import javax.swing.text.html.HTMLEditorKit;
import org.dspace.content.Item;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist
*
*/
public class HTMLFilter extends MediaFilter {

    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     */
    @Override
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Strip HTML markup from the source stream and return the plain text as UTF-8.
     *
     * @param currentItem item
     * @param source      source input stream
     * @param verbose     verbose mode
     * @return InputStream the resulting input stream
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        // Parse the document with Swing's HTML editor kit. The charset directive
        // inside the document is ignored: we trust the incoming stream's encoding.
        final HTMLEditorKit editorKit = new HTMLEditorKit();
        final Document htmlDocument = editorKit.createDefaultDocument();
        htmlDocument.putProperty("IgnoreCharsetDirective", Boolean.TRUE);
        editorKit.read(source, htmlDocument, 0);

        // The document model now holds only the text content; stream it back as UTF-8.
        final String plainText = htmlDocument.getText(0, htmlDocument.getLength());
        return new ByteArrayInputStream(plainText.getBytes(StandardCharsets.UTF_8));
    }
}

View File

@@ -50,15 +50,11 @@ public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends
public Options getOptions() { public Options getOptions() {
Options options = new Options(); Options options = new Options();
options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT"); options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT");
options.getOption("v").setType(boolean.class);
options.addOption("q", "quiet", false, "do not print anything except in the event of errors."); options.addOption("q", "quiet", false, "do not print anything except in the event of errors.");
options.getOption("q").setType(boolean.class);
options.addOption("f", "force", false, "force all bitstreams to be processed"); options.addOption("f", "force", false, "force all bitstreams to be processed");
options.getOption("f").setType(boolean.class);
options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier"); options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true, "process no more than maximum items"); options.addOption("m", "maximum", true, "process no more than maximum items");
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
Option pluginOption = Option.builder("p") Option pluginOption = Option.builder("p")
.longOpt("plugins") .longOpt("plugins")

View File

@@ -1,137 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.text.PDFTextStripper;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist
*
*/
// Extracts plain text from PDF bitstreams via PDFBox. Large PDFs can optionally be
// spooled through a temporary file (pdffilter.largepdfs) instead of an in-memory buffer.
public class PDFFilter extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename) {
// Derived bitstream keeps the original name with a ".txt" suffix.
return oldFilename + ".txt";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "TEXT";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "Text";
}
/**
* @return String description
*/
@Override
public String getDescription() {
return "Extracted text";
}
/**
* Extract the text of a PDF and return it as an input stream.
*
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
try {
// When true, extracted text is buffered in a temp file on disk rather than in memory.
boolean useTemporaryFile = configurationService.getBooleanProperty("pdffilter.largepdfs", false);
// get input stream from bitstream
// pass to filter, get string back
PDFTextStripper pts = new PDFTextStripper();
pts.setSortByPosition(true);
PDDocument pdfDoc = null;
Writer writer = null;
File tempTextFile = null;
ByteArrayOutputStream byteStream = null;
if (useTemporaryFile) {
// Temp file is unique per-source and cleaned up on JVM exit.
tempTextFile = File.createTempFile("dspacepdfextract" + source.hashCode(), ".txt");
tempTextFile.deleteOnExit();
writer = new OutputStreamWriter(new FileOutputStream(tempTextFile));
} else {
// In-memory path: text accumulates in a byte array buffer.
byteStream = new ByteArrayOutputStream();
writer = new OutputStreamWriter(byteStream);
}
try {
pdfDoc = PDDocument.load(source);
pts.writeText(pdfDoc, writer);
} catch (InvalidPasswordException ex) {
// Encrypted PDFs are skipped (null result) rather than failing the filter run.
log.error("PDF is encrypted. Cannot extract text (item: {})",
() -> currentItem.getHandle());
return null;
} finally {
// Close the document and writer independently; a failure in one
// must not prevent closing the other, so each gets its own try block.
try {
if (pdfDoc != null) {
pdfDoc.close();
}
} catch (Exception e) {
log.error("Error closing PDF file: " + e.getMessage(), e);
}
try {
writer.close();
} catch (Exception e) {
log.error("Error closing temporary extract file: " + e.getMessage(), e);
}
}
if (useTemporaryFile) {
// NOTE(review): the FileInputStream is handed to the caller, who is
// responsible for closing it.
return new FileInputStream(tempTextFile);
} else {
byte[] bytes = byteStream.toByteArray();
return new ByteArrayInputStream(bytes);
}
} catch (OutOfMemoryError oome) {
// Very large PDFs can exhaust the heap; optionally swallow the error
// (pdffilter.skiponmemoryexception) so the batch run can continue.
log.error("Error parsing PDF document " + oome.getMessage(), oome);
if (!configurationService.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
throw oome;
}
}
return null;
}
}

View File

@@ -1,72 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.xmlbeans.XmlException;
import org.dspace.content.Item;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extract flat text from Microsoft Word documents (.doc, .docx).
*/
public class PoiWordFilter
    extends MediaFilter {
    private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class);

    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    @Override
    public String getBundleName() {
        return "TEXT";
    }

    @Override
    public String getFormatString() {
        return "Text";
    }

    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract the plain text of a Word document (.doc or .docx) as a UTF-8 stream.
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        final String extracted;
        try {
            // POI detects the container format (.doc vs .docx) and picks the extractor.
            extracted = ExtractorFactory.createExtractor(source).getText();
        } catch (IOException | OpenXML4JException | XmlException e) {
            System.err.format("Invalid File Format: %s%n", e.getMessage());
            LOG.error("Unable to parse the bitstream: ", e);
            throw e;
        }

        // if verbose flag is set, print out extracted text to STDOUT
        if (verbose) {
            System.out.println(extracted);
        }

        // return the extracted text as a stream.
        return new ByteArrayInputStream(extracted.getBytes(StandardCharsets.UTF_8));
    }
}

View File

@@ -1,113 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor;
import org.dspace.content.Item;
/*
* TODO: Allow user to configure extraction of only text or only notes
*
*/
public class PowerPointFilter extends MediaFilter {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class);

    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name
     */
    @Override
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String bitstream format
     *
     * TODO: Check that this is correct
     */
    @Override
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description
     */
    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract slide (and, for legacy files, notes) text from a PowerPoint
     * presentation and return it as a UTF-8 input stream.
     *
     * @param currentItem item
     * @param source      source input stream
     * @param verbose     verbose mode
     * @return InputStream the resulting input stream, or null if no text was extracted
     * @throws Exception if error
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {

        try {
            String extractedText = null;
            // createExtractor is static; no factory instance is needed.
            POITextExtractor pptExtractor = ExtractorFactory
                .createExtractor(source);

            // PowerPoint XML files and legacy format PowerPoint files
            // require different classes and APIs for text extraction

            // If this is a PowerPoint XML file, extract accordingly
            if (pptExtractor instanceof XSLFPowerPointExtractor) {

                // The true method arguments indicate that text from
                // the slides and the notes is desired
                extractedText = ((XSLFPowerPointExtractor) pptExtractor)
                    .getText(true, true);
            } else if (pptExtractor instanceof PowerPointExtractor) { // Legacy PowerPoint files

                extractedText = ((PowerPointExtractor) pptExtractor).getText()
                    + " " + ((PowerPointExtractor) pptExtractor).getNotes();
            }

            if (extractedText != null) {
                // if verbose flag is set, print out extracted text
                // to STDOUT
                if (verbose) {
                    System.out.println(extractedText);
                }

                // generate an input stream with the extracted text.
                // Encode explicitly as UTF-8 rather than the platform default,
                // matching the other text-extraction filters.
                byte[] textBytes = extractedText.getBytes(StandardCharsets.UTF_8);
                return new ByteArrayInputStream(textBytes);
            }
        } catch (Exception e) {
            log.error("Error filtering bitstream: " + e.getMessage(), e);
            throw e;
        }

        return null;
    }
}

View File

@@ -0,0 +1,183 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.tika.Tika;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.sax.BodyContentHandler;
import org.apache.tika.sax.ContentHandlerDecorator;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.xml.sax.SAXException;
/**
 * Text Extraction media filter which uses Apache Tika to extract text from a large number of file formats (including
 * all Microsoft formats, PDF, HTML, Text, etc). For a more complete list of file formats supported by Tika see the
 * Tika documentation: https://tika.apache.org/2.3.0/formats.html
 */
public class TikaTextExtractionFilter
    extends MediaFilter {
    private final static Logger log = LogManager.getLogger();

    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    @Override
    public String getBundleName() {
        return "TEXT";
    }

    @Override
    public String getFormatString() {
        return "Text";
    }

    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract the text of the source document, returning it as a UTF-8 InputStream.
     * <p>
     * By default extraction is done fully in memory via {@code Tika.parseToString()}, capped at
     * {@code textextractor.max-chars} characters (default 100,000, which is also Tika's default).
     * If {@code textextractor.use-temp-file} is enabled, extraction is instead streamed to a
     * temporary file (see {@link #extractUsingTempFile(InputStream, boolean)}), which lowers
     * memory usage for very large documents at the cost of disk I/O.
     *
     * @param currentItem item owning the bitstream (used only in error reporting)
     * @param source      bitstream content to extract text from
     * @param verbose     if true, also print the extracted text to STDOUT
     * @return InputStream of extracted text (UTF-8 bytes), or null if no text was extracted
     * @throws Exception if text extraction fails
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
        boolean useTemporaryFile = configurationService.getBooleanProperty("textextractor.use-temp-file", false);

        if (useTemporaryFile) {
            // Extract text out of source file using a temp file, returning results as InputStream
            return extractUsingTempFile(source, verbose);
        }

        // Not using temporary file. We'll use Tika's default in-memory parsing.
        // Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting.
        String extractedText;
        int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000);
        try {
            // Use Tika to extract text from input. Tika will automatically detect the file type.
            Tika tika = new Tika();
            tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract
            extractedText = tika.parseToString(source);
        } catch (IOException e) {
            System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString());
            e.printStackTrace();
            log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e);
            throw e;
        } catch (OutOfMemoryError oe) {
            // In-memory extraction of a very large document can exhaust the heap; point the admin
            // at the temp-file option, which avoids buffering the whole text in memory.
            System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " +
                              "You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString());
            oe.printStackTrace();
            log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " +
                      "You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe);
            throw oe;
        }

        if (StringUtils.isNotEmpty(extractedText)) {
            // if verbose flag is set, print out extracted text to STDOUT
            if (verbose) {
                System.out.println("(Verbose mode) Extracted text:");
                System.out.println(extractedText);
            }
            // return the extracted text as a UTF-8 stream.
            return new ByteArrayInputStream(extractedText.getBytes(StandardCharsets.UTF_8));
        }
        return null;
    }

    /**
     * Extracts the text out of a given source InputStream, using a temporary file. This decreases the amount of memory
     * necessary for text extraction, but can be slower as it requires writing extracted text to a temporary file.
     * <p>
     * In verbose mode the temporary file's path is printed and the file is kept for inspection;
     * otherwise it is scheduled for deletion on JVM exit. If extraction fails, the (partial)
     * temporary file is deleted immediately before the exception is rethrown.
     *
     * @param source source InputStream
     * @param verbose verbose mode enabled/disabled
     * @return InputStream for temporary file containing extracted text
     * @throws IOException if the temp file cannot be created/written/read
     * @throws SAXException if the SAX content handler fails (e.g. the temp file write fails mid-parse)
     * @throws TikaException if Tika cannot parse the source
     */
    private InputStream extractUsingTempFile(InputStream source, boolean verbose)
        throws IOException, TikaException, SAXException {
        File tempExtractedTextFile = File.createTempFile("dspacetextextract" + source.hashCode(), ".txt");

        if (verbose) {
            System.out.println("(Verbose mode) Extracted text was written to temporary file at " +
                               tempExtractedTextFile.getAbsolutePath());
        } else {
            tempExtractedTextFile.deleteOnExit();
        }

        // Open temp file for writing
        try (FileWriter writer = new FileWriter(tempExtractedTextFile, StandardCharsets.UTF_8)) {
            // Initialize a custom ContentHandlerDecorator which is a BodyContentHandler.
            // This mimics the behavior of Tika().parseToString(), which only extracts text from the body of the file.
            // This custom Handler writes any extracted text to the temp file.
            ContentHandlerDecorator handler = new BodyContentHandler(new ContentHandlerDecorator() {
                /**
                 * Write all extracted characters directly to the temp file.
                 */
                @Override
                public void characters(char[] ch, int start, int length) throws SAXException {
                    writeToTempFile(ch, start, length);
                }

                /**
                 * Write all ignorable whitespace directly to the temp file.
                 * This mimics the behaviour of Tika().parseToString() which extracts ignorableWhitespace characters
                 * (like blank lines, indentations, etc.), so that we get the same extracted text either way.
                 */
                @Override
                public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
                    writeToTempFile(ch, start, length);
                }

                /**
                 * Shared write logic for both SAX callbacks.
                 * NOTE: Writer.write(char[], int, int) takes (offset, length), matching the SAX
                 * callback contract exactly. The previous code used
                 * writer.append(new String(ch), start, length), but Writer.append's third argument
                 * is an END index (not a length), so any callback with start > 0 wrote the wrong
                 * character range — and it also copied the whole buffer into a String every call.
                 */
                private void writeToTempFile(char[] ch, int start, int length) throws SAXException {
                    try {
                        writer.write(ch, start, length);
                    } catch (IOException e) {
                        String errorMsg = String.format("Could not append to temporary file at %s " +
                                                        "when performing text extraction",
                                                        tempExtractedTextFile.getAbsolutePath());
                        log.error(errorMsg, e);
                        throw new SAXException(errorMsg, e);
                    }
                }
            });

            AutoDetectParser parser = new AutoDetectParser();
            Metadata metadata = new Metadata();
            // parse our source InputStream using the above custom handler
            parser.parse(source, handler, metadata);
        } catch (IOException | TikaException | SAXException e) {
            // Don't leave a partially-written temp file behind on failure (in verbose mode it
            // would otherwise never be deleted, since deleteOnExit() is not set in that path).
            tempExtractedTextFile.delete();
            throw e;
        }

        // At this point, all extracted text is written to our temp file. So, return a FileInputStream for that file
        return new FileInputStream(tempExtractedTextFile);
    }
}

View File

@@ -42,7 +42,7 @@ import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element; import org.jdom2.Element;
/** /**
* Configuration and mapping for Google Scholar output metadata * Configuration and mapping for Google Scholar output metadata

View File

@@ -16,10 +16,10 @@ import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import com.sun.syndication.feed.module.opensearch.OpenSearchModule; import com.rometools.modules.opensearch.OpenSearchModule;
import com.sun.syndication.feed.module.opensearch.entity.OSQuery; import com.rometools.modules.opensearch.entity.OSQuery;
import com.sun.syndication.feed.module.opensearch.impl.OpenSearchModuleImpl; import com.rometools.modules.opensearch.impl.OpenSearchModuleImpl;
import com.sun.syndication.io.FeedException; import com.rometools.rome.io.FeedException;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.util.service.OpenSearchService; import org.dspace.app.util.service.OpenSearchService;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
@@ -29,11 +29,11 @@ import org.dspace.discovery.IndexableObject;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.JDOMException; import org.jdom2.JDOMException;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.output.DOMOutputter; import org.jdom2.output.DOMOutputter;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.Document; import org.w3c.dom.Document;
@@ -192,7 +192,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
* @param scope - null for the entire repository, or a collection/community handle * @param scope - null for the entire repository, or a collection/community handle
* @return Service Document * @return Service Document
*/ */
protected org.jdom.Document getServiceDocument(String scope) { protected org.jdom2.Document getServiceDocument(String scope) {
ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
Namespace ns = Namespace.getNamespace(osNs); Namespace ns = Namespace.getNamespace(osNs);
@@ -245,7 +245,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
url.setAttribute("template", template.toString()); url.setAttribute("template", template.toString());
root.addContent(url); root.addContent(url);
} }
return new org.jdom.Document(root); return new org.jdom2.Document(root);
} }
/** /**
@@ -255,7 +255,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
* @return W3C Document object * @return W3C Document object
* @throws IOException if IO error * @throws IOException if IO error
*/ */
protected Document jDomToW3(org.jdom.Document jdomDoc) throws IOException { protected Document jDomToW3(org.jdom2.Document jdomDoc) throws IOException {
DOMOutputter domOut = new DOMOutputter(); DOMOutputter domOut = new DOMOutputter();
try { try {
return domOut.output(jdomDoc); return domOut.output(jdomDoc);

View File

@@ -15,26 +15,26 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import com.sun.syndication.feed.module.DCModule; import com.rometools.modules.itunes.EntryInformation;
import com.sun.syndication.feed.module.DCModuleImpl; import com.rometools.modules.itunes.EntryInformationImpl;
import com.sun.syndication.feed.module.Module; import com.rometools.modules.itunes.types.Duration;
import com.sun.syndication.feed.module.itunes.EntryInformation; import com.rometools.rome.feed.module.DCModule;
import com.sun.syndication.feed.module.itunes.EntryInformationImpl; import com.rometools.rome.feed.module.DCModuleImpl;
import com.sun.syndication.feed.module.itunes.types.Duration; import com.rometools.rome.feed.module.Module;
import com.sun.syndication.feed.synd.SyndContent; import com.rometools.rome.feed.synd.SyndContent;
import com.sun.syndication.feed.synd.SyndContentImpl; import com.rometools.rome.feed.synd.SyndContentImpl;
import com.sun.syndication.feed.synd.SyndEnclosure; import com.rometools.rome.feed.synd.SyndEnclosure;
import com.sun.syndication.feed.synd.SyndEnclosureImpl; import com.rometools.rome.feed.synd.SyndEnclosureImpl;
import com.sun.syndication.feed.synd.SyndEntry; import com.rometools.rome.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndEntryImpl; import com.rometools.rome.feed.synd.SyndEntryImpl;
import com.sun.syndication.feed.synd.SyndFeed; import com.rometools.rome.feed.synd.SyndFeed;
import com.sun.syndication.feed.synd.SyndFeedImpl; import com.rometools.rome.feed.synd.SyndFeedImpl;
import com.sun.syndication.feed.synd.SyndImage; import com.rometools.rome.feed.synd.SyndImage;
import com.sun.syndication.feed.synd.SyndImageImpl; import com.rometools.rome.feed.synd.SyndImageImpl;
import com.sun.syndication.feed.synd.SyndPerson; import com.rometools.rome.feed.synd.SyndPerson;
import com.sun.syndication.feed.synd.SyndPersonImpl; import com.rometools.rome.feed.synd.SyndPersonImpl;
import com.sun.syndication.io.FeedException; import com.rometools.rome.io.FeedException;
import com.sun.syndication.io.SyndFeedOutput; import com.rometools.rome.io.SyndFeedOutput;
import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;

View File

@@ -21,7 +21,8 @@ import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient; import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.AuthorityValue; import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface; import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.external.OrcidRestConnector; import org.dspace.external.OrcidRestConnector;
@@ -40,7 +41,7 @@ import org.orcid.jaxb.model.v3.release.search.Result;
*/ */
public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface { public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
private static Logger log = Logger.getLogger(Orcidv3SolrAuthorityImpl.class); private final static Logger log = LogManager.getLogger();
private OrcidRestConnector orcidRestConnector; private OrcidRestConnector orcidRestConnector;
private String OAUTHUrl; private String OAUTHUrl;

View File

@@ -93,7 +93,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
private String rptype; private String rptype;
@Lob @Lob
@Type(type = "org.hibernate.type.MaterializedClobType") @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
@Column(name = "rpdescription") @Column(name = "rpdescription")
private String rpdescription; private String rpdescription;

View File

@@ -11,6 +11,7 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
@@ -202,6 +203,8 @@ public class BrowseEngine {
// get the table name that we are going to be getting our data from // get the table name that we are going to be getting our data from
dao.setTable(browseIndex.getTableName()); dao.setTable(browseIndex.getTableName());
dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
// tell the browse query whether we are ascending or descending on the value // tell the browse query whether we are ascending or descending on the value
dao.setAscending(scope.isAscending()); dao.setAscending(scope.isAscending());
@@ -248,9 +251,6 @@ public class BrowseEngine {
} }
} }
// this is the total number of results in answer to the query
int total = getTotalResults();
// assemble the ORDER BY clause // assemble the ORDER BY clause
String orderBy = browseIndex.getSortField(scope.isSecondLevel()); String orderBy = browseIndex.getSortField(scope.isSecondLevel());
if (scope.getSortBy() > 0) { if (scope.getSortBy() > 0) {
@@ -258,6 +258,9 @@ public class BrowseEngine {
} }
dao.setOrderField(orderBy); dao.setOrderField(orderBy);
// this is the total number of results in answer to the query
int total = getTotalResults();
int offset = scope.getOffset(); int offset = scope.getOffset();
String rawFocusValue = null; String rawFocusValue = null;
if (offset < 1 && (scope.hasJumpToItem() || scope.hasJumpToValue() || scope.hasStartsWith())) { if (offset < 1 && (scope.hasJumpToItem() || scope.hasJumpToValue() || scope.hasStartsWith())) {
@@ -269,9 +272,6 @@ public class BrowseEngine {
String focusValue = normalizeJumpToValue(rawFocusValue); String focusValue = normalizeJumpToValue(rawFocusValue);
log.debug("browsing using focus: " + focusValue); log.debug("browsing using focus: " + focusValue);
// Convert the focus value into an offset
offset = getOffsetForValue(focusValue);
} }
dao.setOffset(offset); dao.setOffset(offset);
@@ -684,13 +684,11 @@ public class BrowseEngine {
// our count, storing them locally to reinstate later // our count, storing them locally to reinstate later
String focusField = dao.getJumpToField(); String focusField = dao.getJumpToField();
String focusValue = dao.getJumpToValue(); String focusValue = dao.getJumpToValue();
String orderField = dao.getOrderField();
int limit = dao.getLimit(); int limit = dao.getLimit();
int offset = dao.getOffset(); int offset = dao.getOffset();
dao.setJumpToField(null); dao.setJumpToField(null);
dao.setJumpToValue(null); dao.setJumpToValue(null);
dao.setOrderField(null);
dao.setLimit(-1); dao.setLimit(-1);
dao.setOffset(-1); dao.setOffset(-1);
@@ -700,7 +698,6 @@ public class BrowseEngine {
// now put back the values we removed for this method // now put back the values we removed for this method
dao.setJumpToField(focusField); dao.setJumpToField(focusField);
dao.setJumpToValue(focusValue); dao.setJumpToValue(focusValue);
dao.setOrderField(orderField);
dao.setLimit(limit); dao.setLimit(limit);
dao.setOffset(offset); dao.setOffset(offset);
dao.setCountValues(null); dao.setCountValues(null);

View File

@@ -205,6 +205,9 @@ public class SolrBrowseDAO implements BrowseDAO {
} else if (valuePartial) { } else if (valuePartial) {
query.addFilterQueries("{!field f=" + facetField + "_partial}" + value); query.addFilterQueries("{!field f=" + facetField + "_partial}" + value);
} }
if (StringUtils.isNotBlank(startsWith) && orderField != null) {
query.addFilterQueries("bi_" + orderField + "_sort:" + startsWith + "*");
}
// filter on item to be sure to don't include any other object // filter on item to be sure to don't include any other object
// indexed in the Discovery Search core // indexed in the Discovery Search core
query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE); query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE);

View File

@@ -59,7 +59,7 @@ public class MetadataValue implements ReloadableEntity<Integer> {
* The value of the field * The value of the field
*/ */
@Lob @Lob
@Type(type = "org.hibernate.type.MaterializedClobType") @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
@Column(name = "text_value") @Column(name = "text_value")
private String value; private String value;

View File

@@ -12,7 +12,8 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
@@ -24,7 +25,8 @@ import org.dspace.util.UUIDUtils;
* @author Mykhaylo Boychuk (4science.it) * @author Mykhaylo Boychuk (4science.it)
*/ */
public class EPersonAuthority implements ChoiceAuthority { public class EPersonAuthority implements ChoiceAuthority {
private static final Logger log = Logger.getLogger(EPersonAuthority.class);
private static final Logger log = LogManager.getLogger(EPersonAuthority.class);
/** /**
* the name assigned to the specific instance by the PluginService, @see * the name assigned to the specific instance by the PluginService, @see

View File

@@ -14,8 +14,8 @@ import java.util.List;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* Crosswalk descriptive metadata to and from DIM (DSpace Intermediate * Crosswalk descriptive metadata to and from DIM (DSpace Intermediate

View File

@@ -40,8 +40,8 @@ import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* Crosswalk of technical metadata for DSpace AIP. This is * Crosswalk of technical metadata for DSpace AIP. This is

View File

@@ -23,8 +23,8 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* DIM dissemination crosswalk * DIM dissemination crosswalk

View File

@@ -19,8 +19,8 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* DIM ingestion crosswalk * DIM ingestion crosswalk

View File

@@ -14,8 +14,8 @@ import java.util.List;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* Dissemination Crosswalk plugin -- translate DSpace native * Dissemination Crosswalk plugin -- translate DSpace native

View File

@@ -14,7 +14,7 @@ import java.util.List;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
/** /**
* Ingestion Crosswalk plugin -- translate an external metadata format * Ingestion Crosswalk plugin -- translate an external metadata format

View File

@@ -24,11 +24,11 @@ import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.JDOMException; import org.jdom2.JDOMException;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
/** /**
* METS dissemination crosswalk * METS dissemination crosswalk

View File

@@ -35,8 +35,8 @@ import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* METSRights Ingestion and Dissemination Crosswalk * METSRights Ingestion and Dissemination Crosswalk

View File

@@ -15,7 +15,6 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Properties; import java.util.Properties;
@@ -42,16 +41,18 @@ import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Attribute; import org.jdom2.Attribute;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.JDOMException; import org.jdom2.JDOMException;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.Text; import org.jdom2.Text;
import org.jdom.Verifier; import org.jdom2.Verifier;
import org.jdom.input.SAXBuilder; import org.jdom2.filter.Filters;
import org.jdom.output.XMLOutputter; import org.jdom2.input.SAXBuilder;
import org.jdom.xpath.XPath; import org.jdom2.output.XMLOutputter;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/** /**
* Configurable MODS Crosswalk * Configurable MODS Crosswalk
@@ -156,7 +157,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
static class modsTriple { static class modsTriple {
public String qdc = null; public String qdc = null;
public Element xml = null; public Element xml = null;
public XPath xpath = null; public XPathExpression xpath = null;
/** /**
* Initialize from text versions of QDC, XML and XPath. * Initialize from text versions of QDC, XML and XPath.
@@ -171,9 +172,9 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
final String postlog = "</mods>"; final String postlog = "</mods>";
try { try {
result.qdc = qdc; result.qdc = qdc;
result.xpath = XPath.newInstance(xpath); result.xpath =
result.xpath.addNamespace(MODS_NS.getPrefix(), MODS_NS.getURI()); XPathFactory.instance()
result.xpath.addNamespace(XLINK_NS); .compile(xpath, Filters.fpassthrough(), null, MODS_NS, XLINK_NS);
Document d = builder.build(new StringReader(prolog + xml + postlog)); Document d = builder.build(new StringReader(prolog + xml + postlog));
result.xml = (Element) d.getRootElement().getContent(0); result.xml = (Element) d.getRootElement().getContent(0);
} catch (JDOMException | IOException je) { } catch (JDOMException | IOException je) {
@@ -295,6 +296,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @return List of Elements
*/ */
@Override @Override
public List<Element> disseminateList(Context context, DSpaceObject dso) public List<Element> disseminateList(Context context, DSpaceObject dso)
@@ -352,37 +354,29 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
if (trip == null) { if (trip == null) {
log.warn("WARNING: " + getPluginInstanceName() + ": No MODS mapping for \"" + qdc + "\""); log.warn("WARNING: " + getPluginInstanceName() + ": No MODS mapping for \"" + qdc + "\"");
} else { } else {
try {
Element me = (Element) trip.xml.clone(); Element me = (Element) trip.xml.clone();
if (addSchema) { if (addSchema) {
me.setAttribute("schemaLocation", schemaLocation, XSI_NS); me.setAttribute("schemaLocation", schemaLocation, XSI_NS);
} }
Iterator ni = trip.xpath.selectNodes(me).iterator(); List<Object> matches = trip.xpath.evaluate(me);
if (!ni.hasNext()) { if (matches.isEmpty()) {
log.warn("XPath \"" + trip.xpath.getXPath() + log.warn("XPath \"" + trip.xpath.getExpression() +
"\" found no elements in \"" + "\" found no elements in \"" +
outputUgly.outputString(me) + outputUgly.outputString(me) +
"\", qdc=" + qdc); "\", qdc=" + qdc);
} }
while (ni.hasNext()) { for (Object match: matches) {
Object what = ni.next(); if (match instanceof Element) {
if (what instanceof Element) { ((Element) match).setText(checkedString(value));
((Element) what).setText(checkedString(value)); } else if (match instanceof Attribute) {
} else if (what instanceof Attribute) { ((Attribute) match).setValue(checkedString(value));
((Attribute) what).setValue(checkedString(value)); } else if (match instanceof Text) {
} else if (what instanceof Text) { ((Text) match).setText(checkedString(value));
((Text) what).setText(checkedString(value));
} else { } else {
log.warn("Got unknown object from XPath, class=" + what.getClass().getName()); log.warn("Got unknown object from XPath, class=" + match.getClass().getName());
} }
} }
result.add(me); result.add(me);
} catch (JDOMException je) {
log.error("Error following XPath in modsTriple: context=" +
outputUgly.outputString(trip.xml) +
", xpath=" + trip.xpath.getXPath() + ", exception=" +
je.toString());
}
} }
} }
return result; return result;

View File

@@ -15,9 +15,9 @@ import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.output.Format; import org.jdom2.output.Format;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
/** /**
* "Null" ingestion crosswalk * "Null" ingestion crosswalk

View File

@@ -20,8 +20,8 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* DIM ingestion crosswalk * DIM ingestion crosswalk

View File

@@ -31,8 +31,8 @@ import org.dspace.core.Context;
import org.dspace.core.Utils; import org.dspace.core.Utils;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* ORE dissemination crosswalk * ORE dissemination crosswalk

View File

@@ -34,12 +34,13 @@ import org.dspace.content.service.BundleService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Attribute; import org.jdom2.Attribute;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.JDOMException; import org.jdom2.Namespace;
import org.jdom.Namespace; import org.jdom2.filter.Filters;
import org.jdom.xpath.XPath; import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/** /**
* ORE ingestion crosswalk * ORE ingestion crosswalk
@@ -113,23 +114,21 @@ public class OREIngestionCrosswalk
Document doc = new Document(); Document doc = new Document();
doc.addContent(root.detach()); doc.addContent(root.detach());
XPath xpathLinks;
List<Element> aggregatedResources; List<Element> aggregatedResources;
String entryId; String entryId;
try { XPathExpression<Element> xpathLinks =
xpathLinks = XPath.newInstance("/atom:entry/atom:link[@rel=\"" + ORE_NS.getURI() + "aggregates" + "\"]"); XPathFactory.instance()
xpathLinks.addNamespace(ATOM_NS); .compile("/atom:entry/atom:link[@rel=\"" + ORE_NS.getURI() + "aggregates" + "\"]",
aggregatedResources = xpathLinks.selectNodes(doc); Filters.element(), null, ATOM_NS);
aggregatedResources = xpathLinks.evaluate(doc);
xpathLinks = XPath.newInstance("/atom:entry/atom:link[@rel='alternate']/@href"); XPathExpression<Attribute> xpathAltHref =
xpathLinks.addNamespace(ATOM_NS); XPathFactory.instance()
entryId = ((Attribute) xpathLinks.selectSingleNode(doc)).getValue(); .compile("/atom:entry/atom:link[@rel='alternate']/@href",
} catch (JDOMException e) { Filters.attribute(), null, ATOM_NS);
throw new CrosswalkException("JDOM exception occurred while ingesting the ORE", e); entryId = xpathAltHref.evaluateFirst(doc).getValue();
}
// Next for each resource, create a bitstream // Next for each resource, create a bitstream
XPath xpathDesc;
NumberFormat nf = NumberFormat.getInstance(); NumberFormat nf = NumberFormat.getInstance();
nf.setGroupingUsed(false); nf.setGroupingUsed(false);
nf.setMinimumIntegerDigits(4); nf.setMinimumIntegerDigits(4);
@@ -140,16 +139,12 @@ public class OREIngestionCrosswalk
String bundleName; String bundleName;
Element desc = null; Element desc = null;
try { XPathExpression<Element> xpathDesc =
xpathDesc = XPath.newInstance( XPathFactory.instance()
"/atom:entry/oreatom:triples/rdf:Description[@rdf:about=\"" + this.encodeForURL(href) + "\"][1]"); .compile("/atom:entry/oreatom:triples/rdf:Description[@rdf:about=\"" +
xpathDesc.addNamespace(ATOM_NS); this.encodeForURL(href) + "\"][1]",
xpathDesc.addNamespace(ORE_ATOM); Filters.element(), null, ATOM_NS, ORE_ATOM, RDF_NS);
xpathDesc.addNamespace(RDF_NS); desc = xpathDesc.evaluateFirst(doc);
desc = (Element) xpathDesc.selectSingleNode(doc);
} catch (JDOMException e) {
log.warn("Could not find description for {}", href, e);
}
if (desc != null && desc.getChild("type", RDF_NS).getAttributeValue("resource", RDF_NS) if (desc != null && desc.getChild("type", RDF_NS).getAttributeValue("resource", RDF_NS)
.equals(DS_NS.getURI() + "DSpaceBitstream")) { .equals(DS_NS.getURI() + "DSpaceBitstream")) {

View File

@@ -30,8 +30,8 @@ import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* PREMIS Crosswalk * PREMIS Crosswalk

View File

@@ -14,7 +14,7 @@ import java.util.Map;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
/** /**
* Translate DSpace native metadata into an external XML format, with parameters. * Translate DSpace native metadata into an external XML format, with parameters.

View File

@@ -36,10 +36,10 @@ import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin; import org.dspace.core.SelfNamedPlugin;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
/** /**
* Configurable QDC Crosswalk * Configurable QDC Crosswalk
@@ -290,7 +290,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
qdc2element.put(qdc, element); qdc2element.put(qdc, element);
element2qdc.put(makeQualifiedTagName(element), qdc); element2qdc.put(makeQualifiedTagName(element), qdc);
log.debug("Building Maps: qdc=\"" + qdc + "\", element=\"" + element.toString() + "\""); log.debug("Building Maps: qdc=\"" + qdc + "\", element=\"" + element.toString() + "\"");
} catch (org.jdom.JDOMException je) { } catch (org.jdom2.JDOMException je) {
throw new CrosswalkInternalException( throw new CrosswalkInternalException(
"Failed parsing XML fragment in properties file: \"" + prolog + val + postlog + "\": " + je "Failed parsing XML fragment in properties file: \"" + prolog + val + postlog + "\": " + je
.toString(), je); .toString(), je);
@@ -326,6 +326,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @return List of Elements
*/ */
@Override @Override
public List<Element> disseminateList(Context context, DSpaceObject dso) public List<Element> disseminateList(Context context, DSpaceObject dso)

View File

@@ -26,12 +26,12 @@ import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowException; import org.dspace.workflow.WorkflowException;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.JDOMException; import org.jdom2.JDOMException;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
/** /**
* Role Crosswalk * Role Crosswalk

View File

@@ -24,8 +24,8 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin; import org.dspace.core.SelfNamedPlugin;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* Disseminator for Simple Dublin Core metadata in XML format. * Disseminator for Simple Dublin Core metadata in XML format.
@@ -84,6 +84,7 @@ public class SimpleDCDisseminationCrosswalk extends SelfNamedPlugin
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @return List of Elements
*/ */
@Override @Override
public List<Element> disseminateList(Context context, DSpaceObject dso) public List<Element> disseminateList(Context context, DSpaceObject dso)

View File

@@ -34,9 +34,9 @@ import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin; import org.dspace.core.SelfNamedPlugin;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.Verifier; import org.jdom2.Verifier;
/** /**
* Crosswalk for creating appropriate &lt;meta&gt; elements to appear in the * Crosswalk for creating appropriate &lt;meta&gt; elements to appear in the
@@ -178,6 +178,7 @@ public class XHTMLHeadDisseminationCrosswalk
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @return List of Elements
*/ */
@Override @Override
public List<Element> disseminateList(Context context, DSpaceObject dso) throws CrosswalkException, public List<Element> disseminateList(Context context, DSpaceObject dso) throws CrosswalkException,

View File

@@ -21,7 +21,7 @@ import javax.xml.transform.stream.StreamSource;
import org.dspace.core.SelfNamedPlugin; import org.dspace.core.SelfNamedPlugin;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@@ -18,6 +18,7 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors;
import javax.xml.transform.Transformer; import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerException;
@@ -41,14 +42,15 @@ import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Document; import org.jdom2.Content;
import org.jdom.Element; import org.jdom2.Document;
import org.jdom.Namespace; import org.jdom2.Element;
import org.jdom.Verifier; import org.jdom2.Namespace;
import org.jdom.output.Format; import org.jdom2.Verifier;
import org.jdom.output.XMLOutputter; import org.jdom2.output.Format;
import org.jdom.transform.JDOMResult; import org.jdom2.output.XMLOutputter;
import org.jdom.transform.JDOMSource; import org.jdom2.transform.JDOMResult;
import org.jdom2.transform.JDOMSource;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -244,6 +246,7 @@ public class XSLTDisseminationCrosswalk
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @see DisseminationCrosswalk * @see DisseminationCrosswalk
* @return List of Elements
*/ */
@Override @Override
public List<Element> disseminateList(Context context, DSpaceObject dso) public List<Element> disseminateList(Context context, DSpaceObject dso)
@@ -268,7 +271,12 @@ public class XSLTDisseminationCrosswalk
try { try {
JDOMResult result = new JDOMResult(); JDOMResult result = new JDOMResult();
xform.transform(new JDOMSource(createDIM(dso).getChildren()), result); xform.transform(new JDOMSource(createDIM(dso).getChildren()), result);
return result.getResult(); List<Content> contentList = result.getResult();
// Transform List<Content> into List<Element>
List<Element> elementList = contentList.stream()
.filter(obj -> obj instanceof Element)
.map(Element.class::cast).collect(Collectors.toList());
return elementList;
} catch (TransformerException e) { } catch (TransformerException e) {
LOG.error("Got error: " + e.toString()); LOG.error("Got error: " + e.toString());
throw new CrosswalkInternalException("XSL translation failed: " + e.toString(), e); throw new CrosswalkInternalException("XSL translation failed: " + e.toString(), e);

View File

@@ -12,6 +12,7 @@ import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
import javax.xml.transform.Transformer; import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerException;
@@ -34,13 +35,14 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.factory.CoreServiceFactory;
import org.jdom.Document; import org.jdom2.Content;
import org.jdom.Element; import org.jdom2.Document;
import org.jdom.input.SAXBuilder; import org.jdom2.Element;
import org.jdom.output.Format; import org.jdom2.input.SAXBuilder;
import org.jdom.output.XMLOutputter; import org.jdom2.output.Format;
import org.jdom.transform.JDOMResult; import org.jdom2.output.XMLOutputter;
import org.jdom.transform.JDOMSource; import org.jdom2.transform.JDOMResult;
import org.jdom2.transform.JDOMSource;
/** /**
* Configurable XSLT-driven ingestion Crosswalk * Configurable XSLT-driven ingestion Crosswalk
@@ -141,7 +143,12 @@ public class XSLTIngestionCrosswalk
try { try {
JDOMResult result = new JDOMResult(); JDOMResult result = new JDOMResult();
xform.transform(new JDOMSource(metadata), result); xform.transform(new JDOMSource(metadata), result);
ingestDIM(context, dso, result.getResult(), createMissingMetadataFields); List<Content> contentList = result.getResult();
// Transform List<Content> into List<Element>
List<Element> elementList = contentList.stream()
.filter(obj -> obj instanceof Element)
.map(Element.class::cast).collect(Collectors.toList());
ingestDIM(context, dso, elementList, createMissingMetadataFields);
} catch (TransformerException e) { } catch (TransformerException e) {
log.error("Got error: " + e.toString()); log.error("Got error: " + e.toString());
throw new CrosswalkInternalException("XSL Transformation failed: " + e.toString(), e); throw new CrosswalkInternalException("XSL Transformation failed: " + e.toString(), e);

View File

@@ -7,7 +7,8 @@
*/ */
package org.dspace.content.logic; package org.dspace.content.logic;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -21,7 +22,7 @@ import org.dspace.core.Context;
*/ */
public class DefaultFilter implements Filter { public class DefaultFilter implements Filter {
private LogicalStatement statement; private LogicalStatement statement;
private static Logger log = Logger.getLogger(Filter.class); private final static Logger log = LogManager.getLogger();
/** /**
* Set statement from Spring configuration in item-filters.xml * Set statement from Spring configuration in item-filters.xml

View File

@@ -10,7 +10,8 @@ package org.dspace.content.logic.condition;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
@@ -26,7 +27,7 @@ import org.dspace.core.Context;
* @version $Revision$ * @version $Revision$
*/ */
public class InCommunityCondition extends AbstractCondition { public class InCommunityCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(InCommunityCondition.class); private final static Logger log = LogManager.getLogger();
/** /**
* Return true if item is in one of the specified collections * Return true if item is in one of the specified collections

View File

@@ -7,7 +7,8 @@
*/ */
package org.dspace.content.logic.condition; package org.dspace.content.logic.condition;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException; import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -19,7 +20,7 @@ import org.dspace.core.Context;
* @version $Revision$ * @version $Revision$
*/ */
public class IsWithdrawnCondition extends AbstractCondition { public class IsWithdrawnCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(IsWithdrawnCondition.class); private final static Logger log = LogManager.getLogger();
/** /**
* Return true if item is withdrawn * Return true if item is withdrawn

View File

@@ -11,7 +11,8 @@ import java.util.List;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.logic.LogicalStatementException; import org.dspace.content.logic.LogicalStatementException;
@@ -26,7 +27,7 @@ import org.dspace.core.Context;
*/ */
public class MetadataValueMatchCondition extends AbstractCondition { public class MetadataValueMatchCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(MetadataValueMatchCondition.class); private final static Logger log = LogManager.getLogger();
/** /**
* Return true if any value for a specified field in the item matches a specified regex pattern * Return true if any value for a specified field in the item matches a specified regex pattern

View File

@@ -11,7 +11,8 @@ import java.util.List;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.logic.LogicalStatementException; import org.dspace.content.logic.LogicalStatementException;
@@ -26,7 +27,7 @@ import org.dspace.core.Context;
*/ */
public class MetadataValuesMatchCondition extends AbstractCondition { public class MetadataValuesMatchCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(MetadataValuesMatchCondition.class); private final static Logger log = LogManager.getLogger();
/** /**
* Return true if any value for a specified field in the item matches any of the specified regex patterns * Return true if any value for a specified field in the item matches any of the specified regex patterns

View File

@@ -10,7 +10,8 @@ package org.dspace.content.logic.condition;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
@@ -27,7 +28,7 @@ import org.dspace.core.Context;
* @version $Revision$ * @version $Revision$
*/ */
public class ReadableByGroupCondition extends AbstractCondition { public class ReadableByGroupCondition extends AbstractCondition {
private static Logger log = Logger.getLogger(ReadableByGroupCondition.class); private final static Logger log = LogManager.getLogger();
// Authorize service // Authorize service
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();

View File

@@ -83,10 +83,10 @@ import org.dspace.license.factory.LicenseServiceFactory;
import org.dspace.license.service.CreativeCommonsService; import org.dspace.license.service.CreativeCommonsService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.output.Format; import org.jdom2.output.Format;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
/** /**
* Base class for disseminator of * Base class for disseminator of

View File

@@ -51,7 +51,7 @@ import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowException; import org.dspace.workflow.WorkflowException;
import org.dspace.workflow.factory.WorkflowServiceFactory; import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.jdom.Element; import org.jdom2.Element;
/** /**
* Base class for package ingester of METS (Metadata Encoding and Transmission * Base class for package ingester of METS (Metadata Encoding and Transmission

View File

@@ -20,7 +20,7 @@ import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException; import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.jdom.Element; import org.jdom2.Element;
/** /**
* Subclass of the METS packager framework to ingest a DSpace * Subclass of the METS packager framework to ingest a DSpace

View File

@@ -23,7 +23,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.core.service.PluginService; import org.dspace.core.service.PluginService;
import org.jdom.Element; import org.jdom2.Element;
/** /**
* Packager plugin to ingest a * Packager plugin to ingest a

View File

@@ -35,15 +35,17 @@ import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Content; import org.jdom2.Content;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.JDOMException; import org.jdom2.JDOMException;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.input.SAXBuilder; import org.jdom2.filter.Filters;
import org.jdom.output.Format; import org.jdom2.input.SAXBuilder;
import org.jdom.output.XMLOutputter; import org.jdom2.output.Format;
import org.jdom.xpath.XPath; import org.jdom2.output.XMLOutputter;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/** /**
* <P> * <P>
@@ -382,15 +384,12 @@ public class METSManifest {
public List getMdFiles() public List getMdFiles()
throws MetadataValidationException { throws MetadataValidationException {
if (mdFiles == null) { if (mdFiles == null) {
try {
// Use a special namespace with known prefix // Use a special namespace with known prefix
// so we get the right prefix. // so we get the right prefix.
XPath xpath = XPath.newInstance("descendant::mets:mdRef"); XPathExpression<Element> xpath =
xpath.addNamespace(metsNS); XPathFactory.instance()
mdFiles = xpath.selectNodes(mets); .compile("descendant::mets:mdRef", Filters.element(), null, metsNS);
} catch (JDOMException je) { mdFiles = xpath.evaluate(mets);
throw new MetadataValidationException("Failed while searching for mdRef elements in manifest: ", je);
}
} }
return mdFiles; return mdFiles;
} }
@@ -414,25 +413,22 @@ public class METSManifest {
return null; return null;
} }
try { XPathExpression<Element> xpath =
XPath xpath = XPath.newInstance( XPathFactory.instance()
"mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\"" + groupID + "\"]"); .compile(
xpath.addNamespace(metsNS); "mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\"" + groupID + "\"]",
List oFiles = xpath.selectNodes(mets); Filters.element(), null, metsNS);
List<Element> oFiles = xpath.evaluate(mets);
if (oFiles.size() > 0) { if (oFiles.size() > 0) {
if (log.isDebugEnabled()) { if (log.isDebugEnabled()) {
log.debug("Got ORIGINAL file for derived=" + file.toString()); log.debug("Got ORIGINAL file for derived=" + file.toString());
} }
Element flocat = ((Element) oFiles.get(0)).getChild("FLocat", metsNS); Element flocat = oFiles.get(0).getChild("FLocat", metsNS);
if (flocat != null) { if (flocat != null) {
return flocat.getAttributeValue("href", xlinkNS); return flocat.getAttributeValue("href", xlinkNS);
} }
} }
return null; return null;
} catch (JDOMException je) {
log.warn("Got exception on XPATH looking for Original file, " + je.toString());
return null;
}
} }
// translate bundle name from METS to DSpace; METS may be "CONTENT" // translate bundle name from METS to DSpace; METS may be "CONTENT"
@@ -888,20 +884,16 @@ public class METSManifest {
// use only when path varies each time you call it. // use only when path varies each time you call it.
protected Element getElementByXPath(String path, boolean nullOk) protected Element getElementByXPath(String path, boolean nullOk)
throws MetadataValidationException { throws MetadataValidationException {
try { XPathExpression<Element> xpath =
XPath xpath = XPath.newInstance(path); XPathFactory.instance()
xpath.addNamespace(metsNS); .compile(path, Filters.element(), null, metsNS, xlinkNS);
xpath.addNamespace(xlinkNS); Element result = xpath.evaluateFirst(mets);
Object result = xpath.selectSingleNode(mets);
if (result == null && nullOk) { if (result == null && nullOk) {
return null; return null;
} else if (result instanceof Element) { } else if (result == null && !nullOk) {
return (Element) result;
} else {
throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\""); throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\"");
} } else {
} catch (JDOMException je) { return result;
throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\"", je);
} }
} }

View File

@@ -35,7 +35,7 @@ import org.dspace.eperson.PasswordHash;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.jdom.Namespace; import org.jdom2.Namespace;
/** /**
* Plugin to export all Group and EPerson objects in XML, perhaps for reloading. * Plugin to export all Group and EPerson objects in XML, perhaps for reloading.

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.curate; package org.dspace.curate;
import java.io.ByteArrayInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
@@ -154,7 +155,8 @@ public class CitationPage extends AbstractCurationTask {
try { try {
//Create the cited document //Create the cited document
InputStream citedInputStream = InputStream citedInputStream =
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft(); new ByteArrayInputStream(
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft());
//Add the cited document to the approiate bundle //Add the cited document to the approiate bundle
this.addCitedPageToItem(citedInputStream, bundle, pBundle, this.addCitedPageToItem(citedInputStream, bundle, pBundle,
dBundle, displayMap, item, bitstream); dBundle, displayMap, item, bitstream);

View File

@@ -19,7 +19,6 @@ public class CurationCliScriptConfiguration extends CurationScriptConfiguration<
public Options getOptions() { public Options getOptions() {
options = super.getOptions(); options = super.getOptions();
options.addOption("e", "eperson", true, "email address of curating eperson"); options.addOption("e", "eperson", true, "email address of curating eperson");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true); options.getOption("e").setRequired(true);
return options; return options;
} }

View File

@@ -74,25 +74,17 @@ public enum IndexClientOptions {
options options
.addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle"); .addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle");
options.getOption("r").setType(String.class);
options.addOption("i", "index", true, options.addOption("i", "index", true,
"add or update an Item, Collection or Community based on its handle or uuid"); "add or update an Item, Collection or Community based on its handle or uuid");
options.getOption("i").setType(boolean.class);
options.addOption("c", "clean", false, options.addOption("c", "clean", false,
"clean existing index removing any documents that no longer exist in the db"); "clean existing index removing any documents that no longer exist in the db");
options.getOption("c").setType(boolean.class);
options.addOption("d", "delete", false, options.addOption("d", "delete", false,
"delete all records from existing index"); "delete all records from existing index");
options.getOption("d").setType(boolean.class);
options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists"); options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists");
options.getOption("b").setType(boolean.class);
options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f."); options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f.");
options.getOption("s").setType(boolean.class);
options.addOption("f", "force", false, options.addOption("f", "force", false,
"if updating existing index, force each handle to be reindexed even if uptodate"); "if updating existing index, force each handle to be reindexed even if uptodate");
options.getOption("f").setType(boolean.class);
options.addOption("h", "help", false, "print this help message"); options.addOption("h", "help", false, "print this help message");
options.getOption("h").setType(boolean.class);
return options; return options;
} }
} }

View File

@@ -95,7 +95,6 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
100000); 100000);
// Use Tika's Text parser as the streams are always from the TEXT bundle (i.e. already extracted text) // Use Tika's Text parser as the streams are always from the TEXT bundle (i.e. already extracted text)
// TODO: We may wish to consider using Tika to extract the text in the future.
TextAndCSVParser tikaParser = new TextAndCSVParser(); TextAndCSVParser tikaParser = new TextAndCSVParser();
BodyContentHandler tikaHandler = new BodyContentHandler(charLimit); BodyContentHandler tikaHandler = new BodyContentHandler(charLimit);
Metadata tikaMetadata = new Metadata(); Metadata tikaMetadata = new Metadata();

View File

@@ -8,7 +8,6 @@
package org.dspace.disseminate; package org.dspace.disseminate;
import java.awt.Color; import java.awt.Color;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
@@ -297,7 +296,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
} }
@Override @Override
public Pair<InputStream, Long> makeCitedDocument(Context context, Bitstream bitstream) public Pair<byte[], Long> makeCitedDocument(Context context, Bitstream bitstream)
throws IOException, SQLException, AuthorizeException { throws IOException, SQLException, AuthorizeException {
PDDocument document = new PDDocument(); PDDocument document = new PDDocument();
PDDocument sourceDocument = new PDDocument(); PDDocument sourceDocument = new PDDocument();
@@ -318,7 +317,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
document.save(out); document.save(out);
byte[] data = out.toByteArray(); byte[] data = out.toByteArray();
return Pair.of(new ByteArrayInputStream(data), Long.valueOf(data.length)); return Pair.of(data, Long.valueOf(data.length));
} }
} finally { } finally {

View File

@@ -8,7 +8,6 @@
package org.dspace.disseminate.service; package org.dspace.disseminate.service;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.tuple.Pair;
@@ -84,7 +83,7 @@ public interface CitationDocumentService {
* @throws SQLException if database error * @throws SQLException if database error
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
*/ */
public Pair<InputStream, Long> makeCitedDocument(Context context, Bitstream bitstream) public Pair<byte[], Long> makeCitedDocument(Context context, Bitstream bitstream)
throws IOException, SQLException, AuthorizeException; throws IOException, SQLException, AuthorizeException;
/** /**

View File

@@ -7,10 +7,12 @@
*/ */
package org.dspace.eperson; package org.dspace.eperson;
import java.io.IOException;
import java.util.Date; import java.util.Date;
import java.util.UUID; import java.util.UUID;
import javax.mail.MessagingException; import javax.mail.MessagingException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -30,16 +32,17 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* Recommended filter: EPerson+Create * Recommended filter: EPerson+Create
* *
* @author Stuart Lewis * @author Stuart Lewis
* @version $Revision$
*/ */
public class EPersonConsumer implements Consumer { public class EPersonConsumer implements Consumer {
/** /**
* log4j logger * log4j logger
*/ */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class); private static final Logger log
= org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class);
protected EPersonService ePersonService protected EPersonService ePersonService
= EPersonServiceFactory.getInstance().getEPersonService(); = EPersonServiceFactory.getInstance().getEPersonService();
protected ConfigurationService configurationService protected ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService(); = DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -74,6 +77,7 @@ public class EPersonConsumer implements Consumer {
if (et == Event.CREATE) { if (et == Event.CREATE) {
// Notify of new user registration // Notify of new user registration
String notifyRecipient = configurationService.getProperty("registration.notify"); String notifyRecipient = configurationService.getProperty("registration.notify");
EPerson eperson = ePersonService.find(context, id);
if (notifyRecipient == null) { if (notifyRecipient == null) {
notifyRecipient = ""; notifyRecipient = "";
} }
@@ -81,7 +85,6 @@ public class EPersonConsumer implements Consumer {
if (!notifyRecipient.equals("")) { if (!notifyRecipient.equals("")) {
try { try {
EPerson eperson = ePersonService.find(context, id);
Email adminEmail = Email Email adminEmail = Email
.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "registration_notify")); .getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "registration_notify"));
adminEmail.addRecipient(notifyRecipient); adminEmail.addRecipient(notifyRecipient);
@@ -103,6 +106,26 @@ public class EPersonConsumer implements Consumer {
"error_emailing_administrator", ""), me); "error_emailing_administrator", ""), me);
} }
} }
// If enabled, send a "welcome" message to the new EPerson.
if (configurationService.getBooleanProperty("mail.welcome.enabled", false)) {
String addressee = eperson.getEmail();
if (StringUtils.isNotBlank(addressee)) {
log.debug("Sending welcome email to {}", addressee);
try {
Email message = Email.getEmail(
I18nUtil.getEmailFilename(context.getCurrentLocale(), "welcome"));
message.addRecipient(addressee);
message.send();
} catch (IOException | MessagingException ex) {
log.warn("Welcome message not sent to {}: {}",
addressee, ex.getMessage());
}
} else {
log.warn("Welcome message not sent to EPerson {} because it has no email address.",
eperson.getID().toString());
}
}
} else if (et == Event.DELETE) { } else if (et == Event.DELETE) {
// TODO: Implement this if required // TODO: Implement this if required
} }

View File

@@ -31,7 +31,8 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity; import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.client.HttpClients;
import org.apache.log4j.Logger; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -55,7 +56,7 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener {
// 20 is the event max set by the GA API // 20 is the event max set by the GA API
private static final int GA_MAX_EVENTS = 20; private static final int GA_MAX_EVENTS = 20;
private static final String ANALYTICS_BATCH_ENDPOINT = "https://www.google-analytics.com/batch"; private static final String ANALYTICS_BATCH_ENDPOINT = "https://www.google-analytics.com/batch";
private static Logger log = Logger.getLogger(GoogleAsyncEventListener.class); private final static Logger log = LogManager.getLogger();
private static String analyticsKey; private static String analyticsKey;
private static CloseableHttpClient httpclient; private static CloseableHttpClient httpclient;
private static Buffer buffer; private static Buffer buffer;

View File

@@ -29,10 +29,10 @@ import org.dspace.harvest.dao.HarvestedCollectionDAO;
import org.dspace.harvest.service.HarvestedCollectionService; import org.dspace.harvest.service.HarvestedCollectionService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.input.DOMBuilder; import org.jdom2.input.DOMBuilder;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.DOMException; import org.w3c.dom.DOMException;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;

View File

@@ -70,11 +70,11 @@ import org.dspace.harvest.service.HarvestedCollectionService;
import org.dspace.harvest.service.HarvestedItemService; import org.dspace.harvest.service.HarvestedItemService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.Namespace; import org.jdom2.Namespace;
import org.jdom.input.DOMBuilder; import org.jdom2.input.DOMBuilder;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;

View File

@@ -23,8 +23,8 @@ import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace; import org.dspace.utils.DSpace;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;

View File

@@ -45,13 +45,13 @@ import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
import org.dspace.identifier.DOI; import org.dspace.identifier.DOI;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.JDOMException; import org.jdom2.JDOMException;
import org.jdom.filter.ElementFilter; import org.jdom2.filter.ElementFilter;
import org.jdom.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
import org.jdom.output.Format; import org.jdom2.output.Format;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;

View File

@@ -9,10 +9,10 @@ package org.dspace.importer.external.arxiv.metadatamapping.contributor;
import java.util.Collection; import java.util.Collection;
import org.apache.axiom.om.OMElement;
import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor; import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor;
import org.jdom2.Element;
/** /**
* Arxiv specific implementation of {@link MetadataContributor} * Arxiv specific implementation of {@link MetadataContributor}
@@ -32,7 +32,7 @@ public class ArXivIdMetadataContributor extends SimpleXpathMetadatumContributor
* @return a collection of import records. Only the identifier of the found records may be put in the record. * @return a collection of import records. Only the identifier of the found records may be put in the record.
*/ */
@Override @Override
public Collection<MetadatumDTO> contributeMetadata(OMElement t) { public Collection<MetadatumDTO> contributeMetadata(Element t) {
Collection<MetadatumDTO> values = super.contributeMetadata(t); Collection<MetadatumDTO> values = super.contributeMetadata(t);
parseValue(values); parseValue(values);
return values; return values;

View File

@@ -7,8 +7,10 @@
*/ */
package org.dspace.importer.external.arxiv.service; package org.dspace.importer.external.arxiv.service;
import java.io.IOException;
import java.io.StringReader; import java.io.StringReader;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
@@ -20,10 +22,6 @@ import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMXMLBuilderFactory;
import org.apache.axiom.om.OMXMLParserWrapper;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
@@ -31,7 +29,14 @@ import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource; import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException; import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/** /**
* Implements a data source for querying ArXiv * Implements a data source for querying ArXiv
@@ -39,7 +44,7 @@ import org.jaxen.JaxenException;
* @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it)
* *
*/ */
public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement> public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource { implements QuerySource {
private WebTarget webTarget; private WebTarget webTarget;
@@ -213,15 +218,20 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
if (response.getStatus() == 200) { if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class); String responseString = response.readEntity(String.class);
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString));
OMElement element = records.getDocumentElement(); SAXBuilder saxBuilder = new SAXBuilder();
AXIOMXPath xpath = null; Document document = saxBuilder.build(new StringReader(responseString));
Element root = document.getRootElement();
List namespaces = Arrays.asList(Namespace.getNamespace("opensearch",
"http://a9.com/-/spec/opensearch/1.1/"));
XPathExpression<Element> xpath =
XPathFactory.instance().compile("opensearch:totalResults", Filters.element(), null, namespaces);
Element count = xpath.evaluateFirst(root);
try { try {
xpath = new AXIOMXPath("opensearch:totalResults");
xpath.addNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/");
OMElement count = (OMElement) xpath.selectSingleNode(element);
return Integer.parseInt(count.getText()); return Integer.parseInt(count.getText());
} catch (JaxenException e) { } catch (NumberFormatException e) {
return null; return null;
} }
} else { } else {
@@ -274,8 +284,8 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
if (response.getStatus() == 200) { if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class); String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString); List<Element> elements = splitToRecords(responseString);
for (OMElement record : omElements) { for (Element record : elements) {
results.add(transformSourceRecords(record)); results.add(transformSourceRecords(record));
} }
return results; return results;
@@ -321,8 +331,8 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
if (response.getStatus() == 200) { if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class); String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString); List<Element> elements = splitToRecords(responseString);
for (OMElement record : omElements) { for (Element record : elements) {
results.add(transformSourceRecords(record)); results.add(transformSourceRecords(record));
} }
return results; return results;
@@ -359,8 +369,8 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
if (response.getStatus() == 200) { if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class); String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString); List<Element> elements = splitToRecords(responseString);
for (OMElement record : omElements) { for (Element record : elements) {
results.add(transformSourceRecords(record)); results.add(transformSourceRecords(record));
} }
return results; return results;
@@ -387,16 +397,21 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
} }
} }
private List<OMElement> splitToRecords(String recordsSrc) { private List<Element> splitToRecords(String recordsSrc) {
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc));
OMElement element = records.getDocumentElement();
AXIOMXPath xpath = null;
try { try {
xpath = new AXIOMXPath("ns:entry"); SAXBuilder saxBuilder = new SAXBuilder();
xpath.addNamespace("ns", "http://www.w3.org/2005/Atom"); Document document = saxBuilder.build(new StringReader(recordsSrc));
List<OMElement> recordsList = xpath.selectNodes(element); Element root = document.getRootElement();
List namespaces = Arrays.asList(Namespace.getNamespace("ns",
"http://www.w3.org/2005/Atom"));
XPathExpression<Element> xpath =
XPathFactory.instance().compile("ns:entry", Filters.element(), null, namespaces);
List<Element> recordsList = xpath.evaluate(root);
return recordsList; return recordsList;
} catch (JaxenException e) { } catch (JDOMException | IOException e) {
return null; return null;
} }
} }

View File

@@ -10,17 +10,20 @@ package org.dspace.importer.external.metadatamapping.contributor;
import java.text.DateFormat; import java.text.DateFormat;
import java.text.ParseException; import java.text.ParseException;
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import org.apache.axiom.om.OMAttribute;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMText;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException; import org.jdom2.Attribute;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadatumContributor { public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadatumContributor {
@@ -38,32 +41,34 @@ public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadat
} }
@Override @Override
public Collection<MetadatumDTO> contributeMetadata(OMElement t) { public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>(); List<MetadatumDTO> values = new LinkedList<>();
try {
AXIOMXPath xpath = new AXIOMXPath(query); List<Namespace> namespaces = new ArrayList<>();
for (String ns : prefixToNamespaceMapping.keySet()) { for (String ns : prefixToNamespaceMapping.keySet()) {
xpath.addNamespace(prefixToNamespaceMapping.get(ns), ns); namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
} }
List<Object> nodes = xpath.selectNodes(t);
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) { for (Object el : nodes) {
if (el instanceof OMElement) { if (el instanceof Element) {
values.add(getMetadatum(field, ((OMElement) el).getText())); values.add(getMetadatum(field, ((Element) el).getText()));
} else if (el instanceof OMAttribute) { } else if (el instanceof Attribute) {
values.add(getMetadatum(field, ((OMAttribute) el).getAttributeValue())); values.add(getMetadatum(field, ((Attribute) el).getValue()));
} else if (el instanceof String) { } else if (el instanceof String) {
values.add(getMetadatum(field, (String) el)); values.add(getMetadatum(field, (String) el));
} else if (el instanceof OMText) { } else if (el instanceof Text) {
values.add(metadataFieldMapping.toDCValue(field, ((OMText) el).getText())); values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText()));
} else { } else {
System.err.println("node of type: " + el.getClass()); System.err.println("node of type: " + el.getClass());
} }
} }
return values; return values;
} catch (JaxenException e) {
System.err.println(query);
throw new RuntimeException(e);
}
} }
private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) { private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) {

View File

@@ -7,33 +7,36 @@
*/ */
package org.dspace.importer.external.metadatamapping.contributor; package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import javax.annotation.Resource; import javax.annotation.Resource;
import org.apache.axiom.om.OMAttribute; import org.apache.logging.log4j.Logger;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMText;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException; import org.jdom2.Attribute;
import org.slf4j.Logger; import org.jdom2.Element;
import org.slf4j.LoggerFactory; import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
* Metadata contributor that takes an axiom OMElement and turns it into a metadatum * Metadata contributor that takes a JDOM Element and turns it into a metadatum
* *
* @author Roeland Dillen (roeland at atmire dot com) * @author Roeland Dillen (roeland at atmire dot com)
*/ */
public class SimpleXpathMetadatumContributor implements MetadataContributor<OMElement> { public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> {
protected MetadataFieldConfig field; protected MetadataFieldConfig field;
private static final Logger log = LoggerFactory.getLogger(SimpleXpathMetadatumContributor.class); private static final Logger log
= org.apache.logging.log4j.LogManager.getLogger();
/** /**
* Return prefixToNamespaceMapping * Return prefixToNamespaceMapping
@@ -44,14 +47,14 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<OMEl
return prefixToNamespaceMapping; return prefixToNamespaceMapping;
} }
protected MetadataFieldMapping<OMElement, MetadataContributor<OMElement>> metadataFieldMapping; protected MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;
/** /**
* Return metadataFieldMapping * Return metadataFieldMapping
* *
* @return MetadataFieldMapping * @return MetadataFieldMapping
*/ */
public MetadataFieldMapping<OMElement, MetadataContributor<OMElement>> getMetadataFieldMapping() { public MetadataFieldMapping<Element, MetadataContributor<Element>> getMetadataFieldMapping() {
return metadataFieldMapping; return metadataFieldMapping;
} }
@@ -62,7 +65,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<OMEl
*/ */
@Override @Override
public void setMetadataFieldMapping( public void setMetadataFieldMapping(
MetadataFieldMapping<OMElement, MetadataContributor<OMElement>> metadataFieldMapping) { MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping; this.metadataFieldMapping = metadataFieldMapping;
} }
@@ -140,36 +143,35 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<OMEl
* Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO
* list * list
* *
* @param t A class to retrieve metadata from. * @param t An element to retrieve metadata from.
* @return a collection of import records. Only the identifier of the found records may be put in the record. * @return a collection of import records. Only the identifier of the found records may be put in the record.
*/ */
@Override @Override
public Collection<MetadatumDTO> contributeMetadata(OMElement t) { public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>(); List<MetadatumDTO> values = new LinkedList<>();
try {
AXIOMXPath xpath = new AXIOMXPath(query); List<Namespace> namespaces = new ArrayList<>();
for (String ns : prefixToNamespaceMapping.keySet()) { for (String ns : prefixToNamespaceMapping.keySet()) {
xpath.addNamespace(prefixToNamespaceMapping.get(ns), ns); namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
} }
List<Object> nodes = xpath.selectNodes(t);
XPathExpression<Object> xpath =
XPathFactory.instance().compile(query, Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) { for (Object el : nodes) {
if (el instanceof OMElement) { if (el instanceof Element) {
values.add(metadataFieldMapping.toDCValue(field, ((OMElement) el).getText())); values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText()));
} else if (el instanceof OMAttribute) { } else if (el instanceof Attribute) {
values.add(metadataFieldMapping.toDCValue(field, ((OMAttribute) el).getAttributeValue())); values.add(metadataFieldMapping.toDCValue(field, ((Attribute) el).getValue()));
} else if (el instanceof String) { } else if (el instanceof String) {
values.add(metadataFieldMapping.toDCValue(field, (String) el)); values.add(metadataFieldMapping.toDCValue(field, (String) el));
} else if (el instanceof OMText) { } else if (el instanceof Text) {
values.add(metadataFieldMapping.toDCValue(field, ((OMText) el).getText())); values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText()));
} else { } else {
log.error("node of type: " + el.getClass()); log.error("Encountered unsupported XML node of type: {}. Skipped that node.", el.getClass());
} }
} }
return values; return values;
} catch (JaxenException e) {
log.error(query, e);
throw new RuntimeException(e);
}
} }
} }

View File

@@ -7,8 +7,10 @@
*/ */
package org.dspace.importer.external.openaire.service; package org.dspace.importer.external.openaire.service;
import java.io.IOException;
import java.io.StringReader; import java.io.StringReader;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
@@ -19,10 +21,6 @@ import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget; import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMXMLBuilderFactory;
import org.apache.axiom.om.OMXMLParserWrapper;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.datamodel.Query;
@@ -30,14 +28,21 @@ import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource; import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException; import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/** /**
* Implements a data source for querying OpenAIRE * Implements a data source for querying OpenAIRE
* *
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/ */
public class OpenAireImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement> public class OpenAireImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource { implements QuerySource {
private String baseAddress; private String baseAddress;
@@ -205,9 +210,9 @@ public class OpenAireImportMetadataSourceServiceImpl extends AbstractImportMetad
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
if (response.getStatus() == 200) { if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class); String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString); List<Element> omElements = splitToRecords(responseString);
if (omElements != null) { if (omElements != null) {
for (OMElement record : omElements) { for (Element record : omElements) {
results.add(filterMultipleTitles(transformSourceRecords(record))); results.add(filterMultipleTitles(transformSourceRecords(record)));
} }
} }
@@ -237,16 +242,17 @@ public class OpenAireImportMetadataSourceServiceImpl extends AbstractImportMetad
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
if (response.getStatus() == 200) { if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class); String responseString = response.readEntity(String.class);
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString));
OMElement element = records.getDocumentElement(); SAXBuilder saxBuilder = new SAXBuilder();
AXIOMXPath xpath = null; Document document = saxBuilder.build(new StringReader(responseString));
try { Element root = document.getRootElement();
xpath = new AXIOMXPath("/response/header/total");
OMElement totalItem = (OMElement) xpath.selectSingleNode(element); XPathExpression<Element> xpath = XPathFactory.instance().compile("/header/total",
Filters.element(), null);
Element totalItem = (Element) xpath.evaluateFirst(root);
return totalItem != null ? Integer.parseInt(totalItem.getText()) : null; return totalItem != null ? Integer.parseInt(totalItem.getText()) : null;
} catch (JaxenException e) {
return 0;
}
} else { } else {
return 0; return 0;
} }
@@ -282,9 +288,9 @@ public class OpenAireImportMetadataSourceServiceImpl extends AbstractImportMetad
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
if (response.getStatus() == 200) { if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class); String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString); List<Element> omElements = splitToRecords(responseString);
if (omElements != null) { if (omElements != null) {
for (OMElement record : omElements) { for (Element record : omElements) {
results.add(filterMultipleTitles(transformSourceRecords(record))); results.add(filterMultipleTitles(transformSourceRecords(record)));
} }
} }
@@ -316,18 +322,23 @@ public class OpenAireImportMetadataSourceServiceImpl extends AbstractImportMetad
return new ImportRecord(nextSourceRecord); return new ImportRecord(nextSourceRecord);
} }
private List<OMElement> splitToRecords(String recordsSrc) { private List<Element> splitToRecords(String recordsSrc) {
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc));
OMElement element = records.getDocumentElement();
AXIOMXPath xpath = null;
try { try {
xpath = new AXIOMXPath("/response/results/result"); SAXBuilder saxBuilder = new SAXBuilder();
xpath.addNamespace("dri", "http://www.driver-repository.eu/namespace/dri"); Document document = saxBuilder.build(new StringReader(recordsSrc));
xpath.addNamespace("oaf", "http://namespace.openaire.eu/oaf"); Element root = document.getRootElement();
xpath.addNamespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");
List<OMElement> recordsList = xpath.selectNodes(element); List namespaces = Arrays.asList(
Namespace.getNamespace("dri", "http://www.driver-repository.eu/namespace/dri"),
Namespace.getNamespace("oaf", "http://namespace.openaire.eu/oaf"),
Namespace.getNamespace("xsi", "http://www.w3.org/2001/XMLSchema-instance"));
XPathExpression<Element> xpath = XPathFactory.instance().compile("/results/result",
Filters.element(), null, namespaces);
List<Element> recordsList = xpath.evaluate(root);
return recordsList; return recordsList;
} catch (JaxenException e) { } catch (JDOMException | IOException e) {
return null; return null;
} }
} }

View File

@@ -25,10 +25,6 @@ import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import com.google.common.io.CharStreams; import com.google.common.io.CharStreams;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMXMLBuilderFactory;
import org.apache.axiom.om.OMXMLParserWrapper;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.datamodel.Query;
@@ -38,7 +34,13 @@ import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.FileSource; import org.dspace.importer.external.service.components.FileSource;
import org.dspace.importer.external.service.components.QuerySource; import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException; import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/** /**
* Implements a data source for querying PubMed Central * Implements a data source for querying PubMed Central
@@ -46,7 +48,7 @@ import org.jaxen.JaxenException;
* @author Roeland Dillen (roeland at atmire dot com) * @author Roeland Dillen (roeland at atmire dot com)
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/ */
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement> public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource, FileSource { implements QuerySource, FileSource {
private String baseAddress; private String baseAddress;
@@ -243,17 +245,21 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
private String getSingleElementValue(String src, String elementName) { private String getSingleElementValue(String src, String elementName) {
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(src));
OMElement element = records.getDocumentElement();
AXIOMXPath xpath = null;
String value = null; String value = null;
try { try {
xpath = new AXIOMXPath("//" + elementName); SAXBuilder saxBuilder = new SAXBuilder();
List<OMElement> recordsList = xpath.selectNodes(element); Document document = saxBuilder.build(new StringReader(src));
if (!recordsList.isEmpty()) { Element root = document.getRootElement();
value = recordsList.get(0).getText();
XPathExpression<Element> xpath =
XPathFactory.instance().compile("//" + elementName, Filters.element());
Element record = xpath.evaluateFirst(root);
if (record != null) {
value = record.getText();
} }
} catch (JaxenException e) { } catch (JDOMException | IOException e) {
value = null; value = null;
} }
return value; return value;
@@ -314,9 +320,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE);
response = invocationBuilder.get(); response = invocationBuilder.get();
List<OMElement> omElements = splitToRecords(response.readEntity(String.class)); List<Element> elements = splitToRecords(response.readEntity(String.class));
for (OMElement record : omElements) { for (Element record : elements) {
records.add(transformSourceRecords(record)); records.add(transformSourceRecords(record));
} }
@@ -324,15 +330,18 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
} }
} }
private List<OMElement> splitToRecords(String recordsSrc) { private List<Element> splitToRecords(String recordsSrc) {
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc));
OMElement element = records.getDocumentElement();
AXIOMXPath xpath = null;
try { try {
xpath = new AXIOMXPath("//PubmedArticle"); SAXBuilder saxBuilder = new SAXBuilder();
List<OMElement> recordsList = xpath.selectNodes(element); Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
XPathExpression<Element> xpath =
XPathFactory.instance().compile("//PubmedArticle", Filters.element());
List<Element> recordsList = xpath.evaluate(root);
return recordsList; return recordsList;
} catch (JaxenException e) { } catch (JDOMException | IOException e) {
return null; return null;
} }
} }
@@ -362,13 +371,13 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
Response response = invocationBuilder.get(); Response response = invocationBuilder.get();
List<OMElement> omElements = splitToRecords(response.readEntity(String.class)); List<Element> elements = splitToRecords(response.readEntity(String.class));
if (omElements.size() == 0) { if (elements.isEmpty()) {
return null; return null;
} }
return transformSourceRecords(omElements.get(0)); return transformSourceRecords(elements.get(0));
} }
} }
@@ -441,8 +450,8 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
private List<ImportRecord> parseXMLString(String xml) { private List<ImportRecord> parseXMLString(String xml) {
List<ImportRecord> records = new LinkedList<ImportRecord>(); List<ImportRecord> records = new LinkedList<ImportRecord>();
List<OMElement> omElements = splitToRecords(xml); List<Element> elements = splitToRecords(xml);
for (OMElement record : omElements) { for (Element record : elements) {
records.add(transformSourceRecords(record)); records.add(transformSourceRecords(record));
} }
return records; return records;

View File

@@ -10,7 +10,7 @@ package org.dspace.license;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
import org.jdom.Document; import org.jdom2.Document;
/** /**
* Service interface class for the Creative commons license connector service. * Service interface class for the Creative commons license connector service.

View File

@@ -32,13 +32,14 @@ import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils; import org.apache.http.util.EntityUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.jaxen.JaxenException; import org.jdom2.Attribute;
import org.jaxen.jdom.JDOMXPath; import org.jdom2.Document;
import org.jdom.Attribute; import org.jdom2.Element;
import org.jdom.Document; import org.jdom2.JDOMException;
import org.jdom.Element; import org.jdom2.filter.Filters;
import org.jdom.JDOMException; import org.jdom2.input.SAXBuilder;
import org.jdom.input.SAXBuilder; import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.xml.sax.InputSource; import org.xml.sax.InputSource;
@@ -96,7 +97,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
List<String> licenses; List<String> licenses;
try (CloseableHttpResponse response = client.execute(httpGet)) { try (CloseableHttpResponse response = client.execute(httpGet)) {
licenses = retrieveLicenses(response); licenses = retrieveLicenses(response);
} catch (JDOMException | JaxenException | IOException e) { } catch (JDOMException | IOException e) {
log.error("Error while retrieving the license details using url: " + uri, e); log.error("Error while retrieving the license details using url: " + uri, e);
licenses = Collections.emptyList(); licenses = Collections.emptyList();
} }
@@ -110,7 +111,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
try (CloseableHttpResponse response = client.execute(licenseHttpGet)) { try (CloseableHttpResponse response = client.execute(licenseHttpGet)) {
CCLicense ccLicense = retrieveLicenseObject(license, response); CCLicense ccLicense = retrieveLicenseObject(license, response);
ccLicenses.put(ccLicense.getLicenseId(), ccLicense); ccLicenses.put(ccLicense.getLicenseId(), ccLicense);
} catch (JaxenException | JDOMException | IOException e) { } catch (JDOMException | IOException e) {
log.error("Error while retrieving the license details using url: " + licenseUri, e); log.error("Error while retrieving the license details using url: " + licenseUri, e);
} }
} }
@@ -125,25 +126,23 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
* @param response The response from the API * @param response The response from the API
* @return a list of license identifiers for which details need to be retrieved * @return a list of license identifiers for which details need to be retrieved
* @throws IOException * @throws IOException
* @throws JaxenException
* @throws JDOMException * @throws JDOMException
*/ */
private List<String> retrieveLicenses(CloseableHttpResponse response) private List<String> retrieveLicenses(CloseableHttpResponse response)
throws IOException, JaxenException, JDOMException { throws IOException, JDOMException {
List<String> domains = new LinkedList<>(); List<String> domains = new LinkedList<>();
String[] excludedLicenses = configurationService.getArrayProperty("cc.license.classfilter"); String[] excludedLicenses = configurationService.getArrayProperty("cc.license.classfilter");
String responseString = EntityUtils.toString(response.getEntity()); String responseString = EntityUtils.toString(response.getEntity());
JDOMXPath licenseClassXpath = new JDOMXPath("//licenses/license"); XPathExpression<Element> licenseClassXpath =
XPathFactory.instance().compile("//licenses/license", Filters.element());
try (StringReader stringReader = new StringReader(responseString)) { try (StringReader stringReader = new StringReader(responseString)) {
InputSource is = new InputSource(stringReader); InputSource is = new InputSource(stringReader);
org.jdom.Document classDoc = this.parser.build(is); org.jdom2.Document classDoc = this.parser.build(is);
List<Element> elements = licenseClassXpath.selectNodes(classDoc); List<Element> elements = licenseClassXpath.evaluate(classDoc);
for (Element element : elements) { for (Element element : elements) {
String licenseId = getSingleNodeValue(element, "@id"); String licenseId = getSingleNodeValue(element, "@id");
if (StringUtils.isNotBlank(licenseId) && !ArrayUtils.contains(excludedLicenses, licenseId)) { if (StringUtils.isNotBlank(licenseId) && !ArrayUtils.contains(excludedLicenses, licenseId)) {
@@ -163,30 +162,29 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
* @param response for a specific CC License response * @param response for a specific CC License response
* @return the corresponding CC License Object * @return the corresponding CC License Object
* @throws IOException * @throws IOException
* @throws JaxenException
* @throws JDOMException * @throws JDOMException
*/ */
private CCLicense retrieveLicenseObject(final String licenseId, CloseableHttpResponse response) private CCLicense retrieveLicenseObject(final String licenseId, CloseableHttpResponse response)
throws IOException, JaxenException, JDOMException { throws IOException, JDOMException {
String responseString = EntityUtils.toString(response.getEntity()); String responseString = EntityUtils.toString(response.getEntity());
XPathExpression<Object> licenseClassXpath =
JDOMXPath licenseClassXpath = new JDOMXPath("//licenseclass"); XPathFactory.instance().compile("//licenseclass", Filters.fpassthrough());
JDOMXPath licenseFieldXpath = new JDOMXPath("field"); XPathExpression<Element> licenseFieldXpath =
XPathFactory.instance().compile("field", Filters.element());
try (StringReader stringReader = new StringReader(responseString)) { try (StringReader stringReader = new StringReader(responseString)) {
InputSource is = new InputSource(stringReader); InputSource is = new InputSource(stringReader);
org.jdom.Document classDoc = this.parser.build(is); org.jdom2.Document classDoc = this.parser.build(is);
Object element = licenseClassXpath.selectSingleNode(classDoc); Object element = licenseClassXpath.evaluateFirst(classDoc);
String licenseLabel = getSingleNodeValue(element, "label"); String licenseLabel = getSingleNodeValue(element, "label");
List<CCLicenseField> ccLicenseFields = new LinkedList<>(); List<CCLicenseField> ccLicenseFields = new LinkedList<>();
List<Element> licenseFields = licenseFieldXpath.selectNodes(element); List<Element> licenseFields = licenseFieldXpath.evaluate(element);
for (Element licenseField : licenseFields) { for (Element licenseField : licenseFields) {
CCLicenseField ccLicenseField = parseLicenseField(licenseField); CCLicenseField ccLicenseField = parseLicenseField(licenseField);
ccLicenseFields.add(ccLicenseField); ccLicenseFields.add(ccLicenseField);
@@ -196,13 +194,14 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
} }
} }
private CCLicenseField parseLicenseField(final Element licenseField) throws JaxenException { private CCLicenseField parseLicenseField(final Element licenseField) {
String id = getSingleNodeValue(licenseField, "@id"); String id = getSingleNodeValue(licenseField, "@id");
String label = getSingleNodeValue(licenseField, "label"); String label = getSingleNodeValue(licenseField, "label");
String description = getSingleNodeValue(licenseField, "description"); String description = getSingleNodeValue(licenseField, "description");
JDOMXPath enumXpath = new JDOMXPath("enum"); XPathExpression<Element> enumXpath =
List<Element> enums = enumXpath.selectNodes(licenseField); XPathFactory.instance().compile("enum", Filters.element());
List<Element> enums = enumXpath.evaluate(licenseField);
List<CCLicenseFieldEnum> ccLicenseFieldEnumList = new LinkedList<>(); List<CCLicenseFieldEnum> ccLicenseFieldEnumList = new LinkedList<>();
@@ -215,7 +214,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
} }
private CCLicenseFieldEnum parseEnum(final Element enumElement) throws JaxenException { private CCLicenseFieldEnum parseEnum(final Element enumElement) {
String id = getSingleNodeValue(enumElement, "@id"); String id = getSingleNodeValue(enumElement, "@id");
String label = getSingleNodeValue(enumElement, "label"); String label = getSingleNodeValue(enumElement, "label");
String description = getSingleNodeValue(enumElement, "description"); String description = getSingleNodeValue(enumElement, "description");
@@ -236,9 +235,10 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
} }
} }
private String getSingleNodeValue(final Object t, String query) throws JaxenException { private String getSingleNodeValue(final Object t, String query) {
JDOMXPath xpath = new JDOMXPath(query); XPathExpression xpath =
Object singleNode = xpath.selectSingleNode(t); XPathFactory.instance().compile(query, Filters.fpassthrough());
Object singleNode = xpath.evaluateFirst(t);
return getNodeValue(singleNode); return getNodeValue(singleNode);
} }
@@ -273,7 +273,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
try (CloseableHttpResponse response = client.execute(httpPost)) { try (CloseableHttpResponse response = client.execute(httpPost)) {
return retrieveLicenseUri(response); return retrieveLicenseUri(response);
} catch (JDOMException | JaxenException | IOException e) { } catch (JDOMException | IOException e) {
log.error("Error while retrieving the license uri for license : " + licenseId + " with answers " log.error("Error while retrieving the license uri for license : " + licenseId + " with answers "
+ answerMap.toString(), e); + answerMap.toString(), e);
} }
@@ -286,21 +286,20 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
* @param response for a specific CC License URI response * @param response for a specific CC License URI response
* @return the corresponding CC License URI as a string * @return the corresponding CC License URI as a string
* @throws IOException * @throws IOException
* @throws JaxenException
* @throws JDOMException * @throws JDOMException
*/ */
private String retrieveLicenseUri(final CloseableHttpResponse response) private String retrieveLicenseUri(final CloseableHttpResponse response)
throws IOException, JaxenException, JDOMException { throws IOException, JDOMException {
String responseString = EntityUtils.toString(response.getEntity()); String responseString = EntityUtils.toString(response.getEntity());
JDOMXPath licenseClassXpath = new JDOMXPath("//result/license-uri"); XPathExpression<Object> licenseClassXpath =
XPathFactory.instance().compile("//result/license-uri", Filters.fpassthrough());
try (StringReader stringReader = new StringReader(responseString)) { try (StringReader stringReader = new StringReader(responseString)) {
InputSource is = new InputSource(stringReader); InputSource is = new InputSource(stringReader);
org.jdom.Document classDoc = this.parser.build(is); org.jdom2.Document classDoc = this.parser.build(is);
Object node = licenseClassXpath.selectSingleNode(classDoc); Object node = licenseClassXpath.evaluateFirst(classDoc);
String nodeValue = getNodeValue(node); String nodeValue = getNodeValue(node);
if (StringUtils.isNotBlank(nodeValue)) { if (StringUtils.isNotBlank(nodeValue)) {
@@ -364,12 +363,7 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
* @return the license name * @return the license name
*/ */
public String retrieveLicenseName(final Document doc) { public String retrieveLicenseName(final Document doc) {
try {
return getSingleNodeValue(doc, "//result/license-name"); return getSingleNodeValue(doc, "//result/license-name");
} catch (JaxenException e) {
log.error("Error while retrieving the license name from the license document", e);
}
return null;
} }
} }

View File

@@ -40,8 +40,8 @@ import org.dspace.core.Utils;
import org.dspace.license.service.CreativeCommonsService; import org.dspace.license.service.CreativeCommonsService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.transform.JDOMSource; import org.jdom2.transform.JDOMSource;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;

View File

@@ -18,7 +18,7 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.license.CCLicense; import org.dspace.license.CCLicense;
import org.jdom.Document; import org.jdom2.Document;
/** /**
* Service interface class for the Creative commons licensing. * Service interface class for the Creative commons licensing.

View File

@@ -56,15 +56,12 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration<T extends RetryFailedO
Options options = new Options(); Options options = new Options();
options.addOption("a", true, "Add a new \"failed\" row to the table with a url (test purposes only)"); options.addOption("a", true, "Add a new \"failed\" row to the table with a url (test purposes only)");
options.getOption("a").setType(String.class);
options.addOption("r", false, options.addOption("r", false,
"Retry sending requests to all urls stored in the table with failed requests. " + "Retry sending requests to all urls stored in the table with failed requests. " +
"This includes the url that can be added through the -a option."); "This includes the url that can be added through the -a option.");
options.getOption("r").setType(boolean.class);
options.addOption("h", "help", false, "print this help message"); options.addOption("h", "help", false, "print this help message");
options.getOption("h").setType(boolean.class);
super.options = options; super.options = options;
} }

View File

@@ -49,6 +49,16 @@ public class EntityTypeServiceInitializer implements Callback {
} }
} }
/**
* The callback name, Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return EntityTypeServiceInitializer.class.getSimpleName();
}
@Override @Override
public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) { public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) {
// Must run AFTER all migrations complete, since it is dependent on Hibernate // Must run AFTER all migrations complete, since it is dependent on Hibernate

View File

@@ -51,6 +51,16 @@ public class GroupServiceInitializer implements Callback {
} }
/**
* The callback name, Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return GroupServiceInitializer.class.getSimpleName();
}
/** /**
* Events supported by this callback. * Events supported by this callback.
* @param event Flyway event * @param event Flyway event

View File

@@ -97,6 +97,16 @@ public class PostgreSQLCryptoChecker implements Callback {
} }
} }
/**
* The callback name, Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return PostgreSQLCryptoChecker.class.getSimpleName();
}
/** /**
* Events supported by this callback. * Events supported by this callback.
* @param event Flyway event * @param event Flyway event

View File

@@ -101,6 +101,16 @@ public class RegistryUpdater implements Callback {
} }
/**
* The callback name, Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return RegistryUpdater.class.getSimpleName();
}
/** /**
* Events supported by this callback. * Events supported by this callback.
* @param event Flyway event * @param event Flyway event

View File

@@ -73,6 +73,16 @@ public class SiteServiceInitializer implements Callback {
} }
/**
* The callback name, Flyway will use this to sort the callbacks alphabetically before executing them
* @return The callback name
*/
@Override
public String getCallbackName() {
// Return class name only (not prepended by package)
return SiteServiceInitializer.class.getSimpleName();
}
/** /**
* Events supported by this callback. * Events supported by this callback.
* @param event Flyway event * @param event Flyway event

View File

@@ -0,0 +1,57 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms.hibernate;
import org.apache.commons.lang.StringUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.hibernate.type.AbstractSingleColumnStandardBasicType;
import org.hibernate.type.descriptor.java.StringTypeDescriptor;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
/**
* A Hibernate @Type used to properly support the CLOB in both Postgres and Oracle.
* PostgreSQL doesn't have a CLOB type, instead it's a TEXT field.
* Normally, you'd use org.hibernate.type.TextType to support TEXT, but that won't work for Oracle.
* https://github.com/hibernate/hibernate-orm/blob/5.6/hibernate-core/src/main/java/org/hibernate/type/TextType.java
*
* This Type checks if we are using PostgreSQL.
* If so, it configures Hibernate to map CLOB to LongVarChar (same as org.hibernate.type.TextType)
* If not, it uses default CLOB (which works for other databases).
*/
public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType<String> {
public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType();
public DatabaseAwareLobType() {
super( getDbDescriptor(), StringTypeDescriptor.INSTANCE );
}
public static SqlTypeDescriptor getDbDescriptor() {
if ( isPostgres() ) {
return LongVarcharTypeDescriptor.INSTANCE;
} else {
return ClobTypeDescriptor.DEFAULT;
}
}
private static boolean isPostgres() {
ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
String dbDialect = configurationService.getProperty("db.dialect");
return StringUtils.containsIgnoreCase(dbDialect, "PostgreSQL");
}
@Override
public String getName() {
return "database_aware_lob";
}
}

View File

@@ -1,67 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms.hibernate.postgres;
import java.sql.Types;
import org.hibernate.dialect.PostgreSQL82Dialect;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.PostgresUUIDType;
import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
/**
* UUID's are not supported by default in hibernate due to differences in the database in order to fix this a custom
* sql dialect is needed.
* Source: https://forum.hibernate.org/viewtopic.php?f=1&amp;t=1014157
*
* @author kevinvandevelde at atmire.com
*/
public class DSpacePostgreSQL82Dialect extends PostgreSQL82Dialect {
@Override
public void contributeTypes(final org.hibernate.boot.model.TypeContributions typeContributions,
final ServiceRegistry serviceRegistry) {
super.contributeTypes(typeContributions, serviceRegistry);
typeContributions.contributeType(new InternalPostgresUUIDType());
}
@Override
protected void registerHibernateType(int code, String name) {
super.registerHibernateType(code, name);
}
protected static class InternalPostgresUUIDType extends PostgresUUIDType {
@Override
protected boolean registerUnderJavaType() {
return true;
}
}
/**
* Override is needed to properly support the CLOB on metadatavalue in Postgres and Oracle.
*
* @param sqlCode {@linkplain java.sql.Types JDBC type-code} for the column mapped by this type.
* @return Descriptor for the SQL/JDBC side of a value mapping.
*/
@Override
public SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) {
SqlTypeDescriptor descriptor;
switch (sqlCode) {
case Types.CLOB: {
descriptor = LongVarcharTypeDescriptor.INSTANCE;
break;
}
default: {
descriptor = super.getSqlTypeDescriptorOverride(sqlCode);
break;
}
}
return descriptor;
}
}

View File

@@ -86,10 +86,11 @@ public class MigrationUtils {
cascade = true; cascade = true;
break; break;
case "h2": case "h2":
// In H2, constraints are listed in the "information_schema.constraints" table // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table
constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " +
"FROM information_schema.constraints " + "FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE " +
"WHERE table_name = ? AND column_list = ?"; "WHERE TABLE_NAME = ? AND COLUMN_NAME = ?";
cascade = true;
break; break;
default: default:
throw new SQLException("DBMS " + dbtype + " is unsupported in this migration."); throw new SQLException("DBMS " + dbtype + " is unsupported in this migration.");

View File

@@ -53,11 +53,8 @@ public class SubmissionFormsMigrationCliScriptConfiguration<T extends Submission
Options options = new Options(); Options options = new Options();
options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location");
options.getOption("f").setType(String.class);
options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location");
options.getOption("s").setType(String.class);
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
super.options = options; super.options = options;
} }

View File

@@ -24,10 +24,10 @@ import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.jdom.Document; import org.jdom2.Document;
import org.jdom.Element; import org.jdom2.Element;
import org.jdom.output.Format; import org.jdom2.output.Format;
import org.jdom.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
import org.xml.sax.Attributes; import org.xml.sax.Attributes;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler; import org.xml.sax.helpers.DefaultHandler;

View File

@@ -245,13 +245,13 @@ insert into most_recent_checksum
) )
select select
bitstream.bitstream_id, bitstream.bitstream_id,
'1', true,
CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END,
CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END,
FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'), FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'),
FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'), FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'),
CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END,
'1' true
from bitstream; from bitstream;
-- Update all the deleted checksums -- Update all the deleted checksums
@@ -263,7 +263,7 @@ update most_recent_checksum
set to_be_processed = 0 set to_be_processed = 0
where most_recent_checksum.bitstream_id in ( where most_recent_checksum.bitstream_id in (
select bitstream_id select bitstream_id
from bitstream where deleted = '1' ); from bitstream where deleted = true );
-- this will insert into history table -- this will insert into history table
-- for the initial start -- for the initial start

View File

@@ -36,7 +36,7 @@ alter table metadatavalue alter column resource_id set not null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id, community_id AS resource_id,
4 AS resource_type_id, 4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id,
@@ -47,7 +47,7 @@ FROM community where not introductory_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id, community_id AS resource_id,
4 AS resource_type_id, 4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id,
@@ -58,7 +58,7 @@ FROM community where not short_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id, community_id AS resource_id,
4 AS resource_type_id, 4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id,
@@ -69,7 +69,7 @@ FROM community where not side_bar_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id, community_id AS resource_id,
4 AS resource_type_id, 4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id,
@@ -80,7 +80,7 @@ FROM community where not copyright_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
community_id AS resource_id, community_id AS resource_id,
4 AS resource_type_id, 4 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -104,7 +104,7 @@ alter table community drop column name;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id, collection_id AS resource_id,
3 AS resource_type_id, 3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id,
@@ -115,7 +115,7 @@ FROM collection where not introductory_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id, collection_id AS resource_id,
3 AS resource_type_id, 3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id,
@@ -126,7 +126,7 @@ FROM collection where not short_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id, collection_id AS resource_id,
3 AS resource_type_id, 3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id,
@@ -137,7 +137,7 @@ FROM collection where not side_bar_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id, collection_id AS resource_id,
3 AS resource_type_id, 3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id,
@@ -148,7 +148,7 @@ FROM collection where not copyright_text is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id, collection_id AS resource_id,
3 AS resource_type_id, 3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -159,7 +159,7 @@ FROM collection where not name is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id, collection_id AS resource_id,
3 AS resource_type_id, 3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id,
@@ -170,7 +170,7 @@ FROM collection where not provenance_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
collection_id AS resource_id, collection_id AS resource_id,
3 AS resource_type_id, 3 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id,
@@ -194,7 +194,7 @@ alter table collection drop column provenance_description;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bundle_id AS resource_id, bundle_id AS resource_id,
1 AS resource_type_id, 1 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -214,7 +214,7 @@ alter table bundle drop column name;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id, bitstream_id AS resource_id,
0 AS resource_type_id, 0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -225,7 +225,7 @@ FROM bitstream where not name is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id, bitstream_id AS resource_id,
0 AS resource_type_id, 0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id,
@@ -236,7 +236,7 @@ FROM bitstream where not description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id, bitstream_id AS resource_id,
0 AS resource_type_id, 0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id,
@@ -247,7 +247,7 @@ FROM bitstream where not user_format_description is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
bitstream_id AS resource_id, bitstream_id AS resource_id,
0 AS resource_type_id, 0 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id,
@@ -269,7 +269,7 @@ alter table bitstream drop column source;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_group_id AS resource_id, eperson_group_id AS resource_id,
6 AS resource_type_id, 6 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id,
@@ -288,7 +288,7 @@ alter table epersongroup drop column name;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id, eperson_id AS resource_id,
7 AS resource_type_id, 7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id,
@@ -299,7 +299,7 @@ FROM eperson where not firstname is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id, eperson_id AS resource_id,
7 AS resource_type_id, 7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id,
@@ -310,7 +310,7 @@ FROM eperson where not lastname is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id, eperson_id AS resource_id,
7 AS resource_type_id, 7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id,
@@ -321,7 +321,7 @@ FROM eperson where not phone is null;
INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place)
SELECT SELECT
metadatavalue_seq.nextval as metadata_value_id, NEXT VALUE FOR metadatavalue_seq as metadata_value_id,
eperson_id AS resource_id, eperson_id AS resource_id,
7 AS resource_type_id, 7 AS resource_type_id,
(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id,

View File

@@ -14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_i
SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
LEFT JOIN item ON item2bundle.item_id = item.item_id LEFT JOIN item ON item2bundle.item_id = item.item_id
WHERE item.withdrawn = 1 WHERE item.withdrawn = true
); );
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in (
SELECT item2bundle.bundle_id FROM item2bundle SELECT item2bundle.bundle_id FROM item2bundle
LEFT JOIN item ON item2bundle.item_id = item.item_id LEFT JOIN item ON item2bundle.item_id = item.item_id
WHERE item.withdrawn = 1 WHERE item.withdrawn = true
); );

View File

@@ -17,7 +17,7 @@
INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname,
rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) rptype, rpdescription, eperson_id, epersongroup_id, dspace_object)
SELECT SELECT
resourcepolicy_seq.nextval AS policy_id, NEXT VALUE FOR resourcepolicy_seq AS policy_id,
resource_type_id, resource_type_id,
resource_id, resource_id,
-- Insert the Constants.DELETE action -- Insert the Constants.DELETE action

View File

@@ -14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object i
SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
LEFT JOIN item ON item2bundle.item_id = item.uuid LEFT JOIN item ON item2bundle.item_id = item.uuid
WHERE item.withdrawn = 1 WHERE item.withdrawn = true
); );
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in (
SELECT item2bundle.bundle_id FROM item2bundle SELECT item2bundle.bundle_id FROM item2bundle
LEFT JOIN item ON item2bundle.item_id = item.uuid LEFT JOIN item ON item2bundle.item_id = item.uuid
WHERE item.withdrawn = 1 WHERE item.withdrawn = true
); );

View File

@@ -9,10 +9,11 @@
---------------------------------------------------- ----------------------------------------------------
-- Make sure the metadatavalue.place column starts at 0 instead of 1 -- Make sure the metadatavalue.place column starts at 0 instead of 1
---------------------------------------------------- ----------------------------------------------------
CREATE LOCAL TEMPORARY TABLE mdv_minplace ( CREATE LOCAL TEMPORARY TABLE mdv_minplace (
dspace_object_id UUID NOT NULL, dspace_object_id UUID NOT NULL,
metadata_field_id INT NOT NULL, metadata_field_id INT NOT NULL,
minplace INT NOT NULL, minplace INT NOT NULL
); );
INSERT INTO mdv_minplace INSERT INTO mdv_minplace

View File

@@ -15,6 +15,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.MockAuthoritySolrServiceImpl; import org.dspace.authority.MockAuthoritySolrServiceImpl;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.AbstractBuilder; import org.dspace.builder.AbstractBuilder;
@@ -31,8 +32,9 @@ import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.statistics.MockSolrLoggerServiceImpl; import org.dspace.statistics.MockSolrLoggerServiceImpl;
import org.dspace.statistics.MockSolrStatisticsCore; import org.dspace.statistics.MockSolrStatisticsCore;
import org.dspace.statistics.SolrStatisticsCore;
import org.dspace.storage.rdbms.DatabaseUtils; import org.dspace.storage.rdbms.DatabaseUtils;
import org.jdom.Document; import org.jdom2.Document;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@@ -183,15 +185,15 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
searchService.reset(); searchService.reset();
// Clear the statistics core. // Clear the statistics core.
serviceManager serviceManager
.getServiceByName(null, MockSolrStatisticsCore.class) .getServiceByName(SolrStatisticsCore.class.getName(), MockSolrStatisticsCore.class)
.reset(); .reset();
MockSolrLoggerServiceImpl statisticsService = serviceManager MockSolrLoggerServiceImpl statisticsService = serviceManager
.getServiceByName(null, MockSolrLoggerServiceImpl.class); .getServiceByName("solrLoggerService", MockSolrLoggerServiceImpl.class);
statisticsService.reset(); statisticsService.reset();
MockAuthoritySolrServiceImpl authorityService = serviceManager MockAuthoritySolrServiceImpl authorityService = serviceManager
.getServiceByName(null, MockAuthoritySolrServiceImpl.class); .getServiceByName(AuthoritySearchService.class.getName(), MockAuthoritySolrServiceImpl.class);
authorityService.reset(); authorityService.reset();
// Reload our ConfigurationService (to reset configs to defaults again) // Reload our ConfigurationService (to reset configs to defaults again)

View File

@@ -1,181 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.dspace.content.Item;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Drive the POI-based MS Word filter.
*
* @author mwood
*/
/**
 * Drive the POI-based MS Word filter.
 *
 * <p>Feeds known {@code .doc} and {@code .docx} fixture documents through
 * {@code PoiWordFilter#getDestinationStream} and asserts that the extracted
 * text contains known content from the fixtures.
 *
 * @author mwood
 */
public class PoiWordFilterTest {

    public PoiWordFilterTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /**
     * Test of getDestinationStream method, of class PoiWordFilter.
     * Read a constant .doc document and examine the extracted text.
     *
     * @throws java.lang.Exception passed through.
     */
    @Test
    public void testGetDestinationStreamDoc()
        throws Exception {
        System.out.println("getDestinationStream");
        Item currentItem = null;
        InputStream source;
        boolean verbose = false;
        PoiWordFilter instance = new PoiWordFilter();
        InputStream result;

        source = getClass().getResourceAsStream("wordtest.doc");
        result = instance.getDestinationStream(currentItem, source, verbose);
        assertTrue("Known content was not found", readAll(result).contains("quick brown fox"));
    }

    /**
     * Test of getDestinationStream method, of class PoiWordFilter.
     * Read a constant .docx document and examine the extracted text.
     *
     * @throws java.lang.Exception passed through.
     */
    @Test
    public void testGetDestinationStreamDocx()
        throws Exception {
        System.out.println("getDestinationStream");
        Item currentItem = null;
        InputStream source;
        boolean verbose = false;
        PoiWordFilter instance = new PoiWordFilter();
        InputStream result;

        source = getClass().getResourceAsStream("wordtest.docx");
        result = instance.getDestinationStream(currentItem, source, verbose);
        assertTrue("Known content was not found", readAll(result).contains("quick brown fox"));
    }

    /**
     * Read the entire content of a stream into a String.
     *
     * @param stream a stream of UTF-8 encoded characters; may be null.
     * @return complete content of {@code stream}, or null if {@code stream} is null.
     * @throws IOException if the stream cannot be read.
     */
    private static String readAll(InputStream stream)
        throws IOException {
        if (null == stream) {
            return null;
        }

        // Collect ALL bytes before decoding.  Two pitfalls are avoided here:
        // 1) sizing the buffer from stream.available() -- available() may
        //    legally return 0, and read() into a zero-length array returns 0
        //    immediately, which would silently yield "" for a non-empty stream;
        // 2) decoding each chunk separately -- that corrupts any multi-byte
        //    UTF-8 sequence that straddles a read() boundary.
        ByteArrayOutputStream collected = new ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int howmany;
        while ((howmany = stream.read(chunk)) > 0) {
            collected.write(chunk, 0, howmany);
        }
        return new String(collected.toByteArray(), StandardCharsets.UTF_8);
    }
}

Some files were not shown because too many files have changed in this diff Show More