Resolved merge conflicts from upstream fetch.

This commit is contained in:
William Welling
2016-01-21 11:03:54 -06:00
1605 changed files with 87468 additions and 87633 deletions

View File

@@ -1,4 +1,5 @@
language: java
sudo: false
env:
# Give Maven 1GB of memory to work with
@@ -31,13 +32,13 @@ install: "echo 'Skipping install stage, dependencies will be downloaded during b
# 2. Assemble DSpace
script:
# 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
# (This explicitly skips building the 'dspace' assembly module, since we only want to do that ONCE.)
# license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
# -P !dspace => SKIP full DSpace assembly (will do below)
# -DskipITs=false => Enable DSpace Integration Tests
# -P !assembly => Skip normal assembly (as it can be memory intensive)
# -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build
- "mvn clean install license:check -Dmaven.test.skip=false -P !dspace -B -V"
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V"
# 2. [Assemble DSpace] Ensure assembly process works (from [src]/dspace/), including Mirage 2
# -Dmirage2.on=true => Build Mirage2
# -Dmirage2.deps.included=false => Don't include Mirage2 build dependencies (We installed them in before_install)

View File

@@ -1,162 +1,393 @@
DSpace uses third-party libraries which may be distributed under different
licenses. We have attempted to list all of these third party libraries and
their licenses below (however the most up-to-date information can be found
via Maven, see NOTE #2 at bottom of this page).
licenses. We have listed all of these third party libraries and their licenses
below. This file can be regenerated at any time by simply running:
mvn clean verify -Dthird.party.licenses=true
You must agree to the terms of these licenses, in addition to the DSpace
source code license, in order to use this software.
--------------------------------------------------
Third party Java libraries listed by License type
[Format: Name (Maven Project) - URL]
--------------------------------------------------
---------------------------------------------------
Third party Java libraries listed by License type.
Apache Software License, Version 2.0 (http://opensource.org/licenses/apache2.0)
* Ant-Contrib Tasks (ant-contrib:*) - http://ant-contrib.sourceforge.net/
* Apache Abdera (org.apache.abdera::*) - http://projects.apache.org/projects/abdera.html
* Apache Ant (org.apache.ant:*) - http://ant.apache.org/
* Apache Axis (axis:*) - http://axis.apache.org/axis/
* Apache Cocoon (org.apache.cocoon:*) - http://cocoon.apache.org/2.2/license.html
* Apache Commons BeanUtils (commons-beanutils:*) - http://commons.apache.org/beanutils/
* Apache Commons CLI (commons-cli:*) - http://commons.apache.org/cli/license.html
* Apache Commons Codec (commons-codec:*) - http://commons.apache.org/codec/license.html
* Apache Commons Collections (commons-collections:*) - http://commons.apache.org/collections/license.html
* Apache Commons Configuration (commons-configuration:*) - http://commons.apache.org/configuration/license.html
* Apache Commons DBCP (commons-dbcp:*) - http://commons.apache.org/dbcp/license.html
* Apache Commons Digester (commons-digester:*) - http://commons.apache.org/digester/
* Apache Commons Discovery (commons-discovery:*) - http://commons.apache.org/discovery/license.html
* Apache Commons FileUpload (commons-fileupload:*) - http://commons.apache.org/fileupload/license.html
* Apache Commons HTTP Client (commons-httpclient:*) - http://commons.apache.org/httpclient/license.html
* Apache Commons IO (commons-io:*) - http://commons.apache.org/io/license.html
* Apache Commons JXPath (commons-jxpath:*) - http://commons.apache.org/jxpath/license.html
* Apache Commons Lang (commons-lang:*) - http://commons.apache.org/lang/license.html
* Apache Commons Logging (commons-logging:*) - http://commons.apache.org/logging/license.html
* Apache Commons Pool (commons-pool:*) - http://commons.apache.org/pool/license.html
* Apache Commons Validator (commons-validator:*) - http://commons.apache.org/validator/license.html
* Apache Geronimo (org.apache.geronimo.specs:*) - http://geronimo.apache.org/
* Apache HTTPComponents (org.apache.httpcomponents:*) - http://hc.apache.org/
* Apache Jakarta ORO (oro:*) - http://svn.apache.org/repos/asf/jakarta/oro/trunk/LICENSE
* Apache Jakarta Regexp (jakarta-regexp:*) - http://jakarta.apache.org/regexp/
* Apache JaxMe (jaxme:jaxme-api) - http://ws.apache.org/old/jaxme-old/license.html
* Apache Jena (com.hp.hpl.jena:*) - http://jena.apache.org/
* Apache log4j (log4j:*) - http://logging.apache.org/log4j/
* Apache Lucene (org.apache.lucene:*) - http://lucene.apache.org/
* Apache PDFBox (org.apache.pdfbox:*) - http://pdfbox.apache.org/
* Apache POI (org.apache.poi:*) - http://poi.apache.org/
* Apache Solr (org.apache.solr:*) - http://lucene.apache.org/solr/
* Apache Xerces (xerces:*) - http://xerces.apache.org/
* Apache XML Commons (xml-apis:*) - http://xerces.apache.org/xml-commons/licenses.html
* Apache XML Project (xalan:*) - http://xml.apache.org/xalan-j/#license
* Apache XMLBeans (org.apache.xmlbeans:*) - http://xmlbeans.apache.org/
* Apache ZooKeeper (org.apache.zookeeper:*) - http://zookeeper.apache.org/
* Databene ContiPerf (org.databene:contiperf) - http://databene.org/contiperf
* Ehcache (net.sf.ehcache:*) - http://ehcache.org/about/license
* ElasticSearch (org.elasticsearch:*) - http://www.elasticsearch.org/
* Evo Inflector (org.atteo:*) - http://www.atteo.org/evo-framework/inflector/
* flexjson (net.sf.flexjson:*) - http://sourceforge.net/projects/flexjson/
* Google GSON (com.google.code.gson:*) - http://code.google.com/p/google-gson/
* Google Guava (com.google.guava:*) - http://code.google.com/p/guava-libraries/
* Jackson (org.codehaus.jackson:*) - http://jackson.codehaus.org/
* Jettison (org.codehaus.jettison:*) - http://jettison.codehaus.org/
* Jetty (org.mortbay.jetty:*) - http://jetty.codehaus.org/jetty/license.html
* Lyncode XOAI (com.lyncode:xoai) - http://www.lyncode.com/
* noggit (org.noggit:noggit) - http://noggit.org/
* oai4j (se.kb:oai4j) - http://oai4j-client.sourceforge.net/
* OpenCSV (net.sf.opencsv:*) - http://opencsv.sourceforge.net/
* Rome (net.java.dev.rome:*, org.rometools:*, rome:*) - http://rometools.org/
* spatial4j (com.spatial4j:*) - http://spatial4j.com/
* Spring Framework (org.springframework:*) - http://www.springsource.org/spring-framework
* SWORD Libraries (org.swordapp:*) - http://mvnrepository.com/artifact/org.swordapp/server/2.0
* Woodstox (org.codehaus.woodstox:*) - http://woodstox.codehaus.org/Download
PLEASE NOTE: Some dependencies may be listed under multiple licenses if they
are dual-licensed. This is especially true of anything listed as
"GNU General Public License" below, as DSpace actually does NOT allow for any
dependencies that are solely released under GPL terms. For more info see:
https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
---------------------------------------------------
BSD License (http://www.opensource.org/licenses/BSD-3-Clause)
* asm (asm:*) - http://asm.ow2.org/
* Biblio Transformation Engine (gr.ekt:biblio-transformation-engine) - http://code.google.com/p/biblio-transformation-engine/
* DNSJava (org.dspace.dnsjava:dnsjava) - http://www.xbill.org/dnsjava/dnsjava-current/README
* dom4j (dom4j:*, maven:dom4j) - http://dom4j.sourceforge.net/dom4j-1.6.1/license.html
* Foresite Toolkit (com.googlecode.foresite-toolkit:*) - http://code.google.com/p/foresite-toolkit/
* jargon (org.dspace:jargon) - http://www.sdsc.edu/srb/index.php/Jargon
* Java BibTeX Parser (org.jbibtex:*) - https://code.google.com/p/java-bibtex/
* Jaxen (jaxen:*) - http://jaxen.codehaus.org/license.html
* JLine (jline:*) - http://jline.sourceforge.net/
* JUnitPerf (junitperf:*) - http://www.clarkware.com/software/JUnitPerf.html#license
* MSV (msv:*) - http://msv.java.net/
* StAX (Streaming API for XML) (stax:*) - http://stax.codehaus.org/
* XMLUnit (xmlunit:*) - http://xmlunit.sourceforge.net/
* YUI (com.yahoo.platform.yui:*) - http://yuilibrary.com/license/
Common Development and Distribution License (CDDL) v1.0 (http://www.opensource.org/licenses/CDDL-1.0)
* JavaBeans Activation Framework (javax.activation:*) - http://www.opensource.org/licenses/CDDL-1.0
* Java Mail (javax.mail:*) - http://www.opensource.org/licenses/CDDL-1.0
* JAX-RPC (javax.xml:jaxrpc-api) - http://java.net/projects/jax-rpc/
Apache Software License, Version 2.0:
Common Development and Distribution License (CDDL) v1.1 (http://glassfish.java.net/public/CDDL+GPL_1_1.html)
* JAXB (com.sun.xml.bind:*) - http://jaxb.java.net/
* Jersey (com.sun.jersey:*) - https://jersey.java.net/
* Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net)
* Code Generation Library (cglib:cglib:3.1 - http://cglib.sourceforge.net/)
* HPPC Collections (com.carrotsearch:hppc:0.5.2 - http://labs.carrotsearch.com/hppc.html/hppc)
* metadata-extractor (com.drewnoakes:metadata-extractor:2.6.2 - http://code.google.com/p/metadata-extractor/)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.3.0 - http://wiki.fasterxml.com/JacksonHome)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.1.3 - http://wiki.fasterxml.com/JacksonHome)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.19.0 - http://code.google.com/p/google-api-java-client/google-api-client/)
* Google Analytics API v3-rev103-1.19.0 (com.google.apis:google-api-services-analytics:v3-rev103-1.19.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.0 - http://findbugs.sourceforge.net/)
* Gson (com.google.code.gson:gson:2.2.1 - http://code.google.com/p/google-gson/)
* Guava: Google Core Libraries for Java (com.google.guava:guava:13.0 - http://code.google.com/p/guava-libraries/guava)
* Guava: Google Core Libraries for Java (com.google.guava:guava:14.0.1 - http://code.google.com/p/guava-libraries/guava)
* Guava: Google Core Libraries for Java (com.google.guava:guava:18.0 - http://code.google.com/p/guava-libraries/guava)
* Guava: Google Core Libraries for Java (com.google.guava:guava-jdk5:13.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.19.0 - http://code.google.com/p/google-http-java-client/google-http-client/)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.19.0 - http://code.google.com/p/google-http-java-client/google-http-client-jackson2/)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.19.0 - http://code.google.com/p/google-oauth-java-client/google-oauth-client/)
* Java 6 (and higher) extensions to the Google OAuth Client Library for Java. (com.google.oauth-client:google-oauth-client-java6:1.19.0 - http://code.google.com/p/google-oauth-java-client/google-oauth-client-java6/)
* Jetty extensions to the Google OAuth Client Library for Java. (com.google.oauth-client:google-oauth-client-jetty:1.19.0 - http://code.google.com/p/google-oauth-java-client/google-oauth-client-jetty/)
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.2 - http://code.google.com/p/concurrentlinkedhashmap)
* FORESITE :: Object Reuse and Exchange library (com.googlecode.foresite-toolkit:foresite:0.9 - http://www.openarchives.org/ore)
* ISO Parser (com.googlecode.mp4parser:isoparser:1.0-RC-1 - http://code.google.com/p/mp4parser/)
* builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons)
* Jtwig Core (com.lyncode:jtwig-core:2.0.1 - http://www.lyncode.com/jtwig-core)
* Jtwig Core Functions (com.lyncode:jtwig-functions:2.0.1 - http://www.lyncode.com/jtwig-functions)
* Jtwig Spring (com.lyncode:jtwig-spring:2.0.1 - http://www.lyncode.com/jtwig-spring)
* Test Support (com.lyncode:test-support:1.0.3 - http://nexus.sonatype.org/oss-repository-hosting.html/test-support)
* XOAI : OAI-PMH Java Toolkit (com.lyncode:xoai:3.2.9 - http://www.lyncode.com)
* Spatial4J (com.spatial4j:spatial4j:0.4.1 - https://github.com/spatial4j/spatial4j)
* Commons BeanUtils (commons-beanutils:commons-beanutils:1.8.3 - http://commons.apache.org/beanutils/)
* Commons CLI (commons-cli:commons-cli:1.2 - http://commons.apache.org/cli/)
* Apache Commons Codec (commons-codec:commons-codec:1.9 - http://commons.apache.org/proper/commons-codec/)
* Collections (commons-collections:commons-collections:3.2 - http://jakarta.apache.org/commons/collections/)
* Commons Configuration (commons-configuration:commons-configuration:1.6 - http://commons.apache.org/configuration/)
* Commons Configuration (commons-configuration:commons-configuration:1.8 - http://commons.apache.org/configuration/)
* Commons DBCP (commons-dbcp:commons-dbcp:1.4 - http://commons.apache.org/dbcp/)
* Digester (commons-digester:commons-digester:1.8 - http://jakarta.apache.org/commons/digester/)
* Commons FileUpload (commons-fileupload:commons-fileupload:1.2.1 - http://commons.apache.org/fileupload/)
* HttpClient (commons-httpclient:commons-httpclient:3.1 - http://jakarta.apache.org/httpcomponents/httpclient-3.x/)
* Commons IO (commons-io:commons-io:2.3 - http://commons.apache.org/io/)
* commons-jexl (commons-jexl:commons-jexl:1.0 - no url defined)
* Commons JXPath (commons-jxpath:commons-jxpath:1.3 - http://commons.apache.org/jxpath/)
* Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
* Commons Logging (commons-logging:commons-logging:1.1.1 - http://commons.apache.org/logging)
* Commons Pool (commons-pool:commons-pool:1.4 - http://commons.apache.org/pool/)
* Commons Validator (commons-validator:commons-validator:1.4.0 - http://commons.apache.org/validator/)
* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
* jakarta-regexp (jakarta-regexp:jakarta-regexp:1.4 - no url defined)
* javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/)
* jdbm (jdbm:jdbm:1.0 - no url defined)
* Joda time (joda-time:joda-time:2.2 - http://joda-time.sourceforge.net)
* Joda-Time (joda-time:joda-time:2.3 - http://www.joda.org/joda-time/)
* Apache Log4j (log4j:log4j:1.2.16 - http://logging.apache.org/log4j/1.2/)
* Apache Log4j (log4j:log4j:1.2.17 - http://logging.apache.org/log4j/1.2/)
* Ehcache Core (net.sf.ehcache:ehcache-core:1.7.2 - http://ehcache.sf.net/ehcache-core)
* opencsv (net.sf.opencsv:opencsv:2.0 - http://opencsv.sf.net)
* opencsv (net.sf.opencsv:opencsv:2.3 - http://opencsv.sf.net)
* Abdera Client (org.apache.abdera:abdera-client:1.1.1 - http://abdera.apache.org/abdera-client)
* Abdera Core (org.apache.abdera:abdera-core:1.1.1 - http://abdera.apache.org/abdera-core)
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.1 - http://abdera.apache.org)
* Abdera Parser (org.apache.abdera:abdera-parser:1.1.1 - http://abdera.apache.org/abdera-parser)
* org.apache.tools.ant (org.apache.ant:ant:1.7.0 - http://ant.apache.org/ant/)
* ant-launcher (org.apache.ant:ant-launcher:1.7.0 - http://ant.apache.org/ant-launcher/)
* Avalon Framework API (org.apache.avalon.framework:avalon-framework-api:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-api/)
* Avalon Framework Implementation (org.apache.avalon.framework:avalon-framework-impl:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-impl/)
* Cocoon Configuration API (org.apache.cocoon:cocoon-configuration-api:1.0.2 - http://cocoon.apache.org/subprojects/configuration/1.0/configuration-api/1.0/)
* Cocoon Core (org.apache.cocoon:cocoon-core:2.2.0 - http://cocoon.apache.org/2.2/core-modules/core/2.2/)
* Cocoon Expression Language API (org.apache.cocoon:cocoon-expression-language-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/expression-language-api/1.0/)
* Cocoon Expression Language Implementation. (org.apache.cocoon:cocoon-expression-language-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/expression-language-impl/1.0/)
* Cocoon Flowscript Block Implementation (org.apache.cocoon:cocoon-flowscript-impl:1.0.0 - http://cocoon.apache.org/2.2/blocks/flowscript/1.0/)
* Cocoon Linkrewriter Block Implementation (org.apache.cocoon:cocoon-linkrewriter-impl:1.0.0 - http://cocoon.apache.org/2.2/blocks/linkrewriter/1.0/)
* Cocoon Pipeline API (org.apache.cocoon:cocoon-pipeline-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/pipeline-api/1.0/)
* Cocoon Pipeline Components (org.apache.cocoon:cocoon-pipeline-components:1.0.0 - http://cocoon.apache.org/2.2/core-modules/pipeline-components/1.0/)
* Cocoon Pipeline Implementation (org.apache.cocoon:cocoon-pipeline-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/pipeline-impl/1.0/)
* Cocoon Servlet Service Components (org.apache.cocoon:cocoon-servlet-service-components:1.0.0 - http://cocoon.apache.org/subprojects/servlet-service/1.0/servlet-service-components/1.0/)
* Cocoon Sitemap API (org.apache.cocoon:cocoon-sitemap-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/sitemap-api/1.0/)
* Cocoon Sitemap Components (org.apache.cocoon:cocoon-sitemap-components:1.0.0 - http://cocoon.apache.org/2.2/core-modules/sitemap-components/1.0/)
* Cocoon Sitemap Implementation (org.apache.cocoon:cocoon-sitemap-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/sitemap-impl/1.0/)
* Cocoon Spring Configurator (org.apache.cocoon:cocoon-spring-configurator:1.0.2 - http://cocoon.apache.org/cocoon-spring-configurator)
* Cocoon Store Implementation (org.apache.cocoon:cocoon-store-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/store-impl/1.0/)
* Cocoon Template Framework Block Implementation (org.apache.cocoon:cocoon-template-impl:1.1.0 - http://cocoon.apache.org/2.2/blocks/template/1.0/)
* Cocoon Thread API (org.apache.cocoon:cocoon-thread-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/thread-api/1.0/)
* Cocoon Thread Implementation (org.apache.cocoon:cocoon-thread-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/thread-impl/1.0/)
* Cocoon Util (org.apache.cocoon:cocoon-util:1.0.0 - http://cocoon.apache.org/2.2/core-modules/util/1.0/)
* Cocoon XML API (org.apache.cocoon:cocoon-xml-api:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-api/1.0/)
* Cocoon XML Implementation (org.apache.cocoon:cocoon-xml-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-impl/1.0/)
* Cocoon XML Resolver (org.apache.cocoon:cocoon-xml-resolver:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-resolver/1.0/)
* Cocoon XML Utilities (org.apache.cocoon:cocoon-xml-util:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-util/1.0/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.7 - http://commons.apache.org/proper/commons-compress/)
* Commons Lang (org.apache.commons:commons-lang3:3.1 - http://commons.apache.org/lang/)
* Excalibur Pool API (org.apache.excalibur.components:excalibur-pool-api:2.2.1 - http://www.apache.org/excalibur/excalibur-components-modules/excalibur-pool-modules/excalibur-pool-api/)
* Excalibur Sourceresolve (org.apache.excalibur.components:excalibur-sourceresolve:2.2.3 - http://www.apache.org/excalibur/excalibur-sourceresolve/)
* Excalibur Store (org.apache.excalibur.components:excalibur-store:2.2.1 - http://www.apache.org/excalibur/excalibur-components-modules/excalibur-store/)
* Excalibur XML Utilities (org.apache.excalibur.components:excalibur-xmlutil:2.2.1 - http://www.apache.org/excalibur/excalibur-components-modules/excalibur-xmlutil/)
* Excalibur Instrument API (org.apache.excalibur.containerkit:excalibur-instrument-api:2.2.1 - http://www.apache.org/excalibur/excalibur-containerkit/excalibur-instrument-modules/excalibur-instrument-api/)
* Excalibur Logger (org.apache.excalibur.containerkit:excalibur-logger:2.2.1 - http://www.apache.org/excalibur/excalibur-containerkit/excalibur-logger/)
* Activation (org.apache.geronimo.specs:geronimo-activation_1.0.2_spec:1.1 - http://geronimo.apache.org/geronimo-activation_1.0.2_spec)
* Activation 1.1 (org.apache.geronimo.specs:geronimo-activation_1.1_spec:1.0.2 - http://geronimo.apache.org/specs/geronimo-activation_1.1_spec)
* JavaMail 1.4 (org.apache.geronimo.specs:geronimo-javamail_1.4_spec:1.6 - http://geronimo.apache.org/maven/specs/geronimo-javamail_1.4_spec/1.6)
* Streaming API for XML (STAX API 1.0) (org.apache.geronimo.specs:geronimo-stax-api_1.0_spec:1.0 - http://geronimo.apache.org/specs/geronimo-stax-api_1.0_spec)
* Streaming API for XML (STAX API 1.0) (org.apache.geronimo.specs:geronimo-stax-api_1.0_spec:1.0.1 - http://geronimo.apache.org/specs/geronimo-stax-api_1.0_spec)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:2.2.0 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:2.2.0 - no url defined)
* Apache Hadoop Common (org.apache.hadoop:hadoop-common:2.2.0 - no url defined)
* Apache Hadoop HDFS (org.apache.hadoop:hadoop-hdfs:2.2.0 - no url defined)
* Apache HttpClient (org.apache.httpcomponents:httpclient:4.3.5 - http://hc.apache.org/httpcomponents-client)
* Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.2.6 - http://hc.apache.org/httpcomponents-client)
* Apache HttpCore (org.apache.httpcomponents:httpcore:4.3.2 - http://hc.apache.org/httpcomponents-core-ga)
* Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.3.1 - http://hc.apache.org/httpcomponents-client)
* Apache JAMES Mime4j (Core) (org.apache.james:apache-mime4j-core:0.7.2 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache JAMES Mime4j (DOM) (org.apache.james:apache-mime4j-dom:0.7.2 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.12.0 - http://jena.apache.org/apache-jena-libs/)
* Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.12.0 - http://jena.apache.org/jena-arq/)
* Apache Jena - Core (org.apache.jena:jena-core:2.12.0 - http://jena.apache.org/jena-core/)
* Apache Jena - IRI (org.apache.jena:jena-iri:1.1.0 - http://jena.apache.org/jena-iri/)
* Apache Jena - TDB (Native Triple Store) (org.apache.jena:jena-tdb:1.1.0 - http://jena.apache.org/jena-tdb/)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
* Lucene Morfologik Polish Lemmatizer (org.apache.lucene:lucene-analyzers-morfologik:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-morfologik)
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
* Lucene codecs (org.apache.lucene:lucene-codecs:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-codecs)
* Lucene Core (org.apache.lucene:lucene-core:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Expressions (org.apache.lucene:lucene-expressions:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-expressions)
* Lucene Grouping (org.apache.lucene:lucene-grouping:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Join (org.apache.lucene:lucene-join:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Memory (org.apache.lucene:lucene-memory:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Queries (org.apache.lucene:lucene-queries:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial (org.apache.lucene:lucene-spatial:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-spatial)
* Lucene Suggest (org.apache.lucene:lucene-suggest:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache FontBox (org.apache.pdfbox:fontbox:1.8.7 - http://pdfbox.apache.org/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.7 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:1.8.7 - http://www.apache.org/pdfbox-parent/pdfbox/)
* Apache POI (org.apache.poi:poi:3.6 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml:3.6 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.10.1 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.6 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:3.6 - http://poi.apache.org/)
* Apache Solr Search Server (org.apache.solr:solr:4.10.2 - http://lucene.apache.org/solr-parent/solr)
* Apache Solr Analysis Extras (org.apache.solr:solr-analysis-extras:4.10.2 - http://lucene.apache.org/solr-parent/solr-analysis-extras)
* Apache Solr Content Extraction Library (org.apache.solr:solr-cell:4.10.2 - http://lucene.apache.org/solr-parent/solr-cell)
* Apache Solr Core (org.apache.solr:solr-core:4.10.2 - http://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:4.10.2 - http://lucene.apache.org/solr-parent/solr-solrj)
* Apache Tika core (org.apache.tika:tika-core:1.5 - http://tika.apache.org/)
* Apache Tika parsers (org.apache.tika:tika-parsers:1.5 - http://tika.apache.org/)
* Apache Tika XMP (org.apache.tika:tika-xmp:1.5 - http://tika.apache.org/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.10 - http://ws.apache.org/axiom/axiom-api/)
* Axiom Impl (org.apache.ws.commons.axiom:axiom-impl:1.2.10 - http://ws.apache.org/axiom/axiom-impl/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:2.3.0 - http://xmlbeans.apache.org)
* XmlBeans (org.apache.xmlbeans:xmlbeans:2.6.0 - http://xmlbeans.apache.org)
* zookeeper (org.apache.zookeeper:zookeeper:3.4.6 - no url defined)
* Evo Inflector (org.atteo:evo-inflector:1.0.1 - http://atteo.org/static/evo-inflector)
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
* Jackson (org.codehaus.jackson:jackson-core-asl:1.9.13 - http://jackson.codehaus.org)
* Jackson (org.codehaus.jackson:jackson-core-asl:1.9.2 - http://jackson.codehaus.org)
* JAX-RS provider for JSON content type (org.codehaus.jackson:jackson-jaxrs:1.9.2 - http://jackson.codehaus.org)
* Data Mapper for Jackson (org.codehaus.jackson:jackson-mapper-asl:1.9.13 - http://jackson.codehaus.org)
* Data Mapper for Jackson (org.codehaus.jackson:jackson-mapper-asl:1.9.2 - http://jackson.codehaus.org)
* Xml Compatibility extensions for Jackson (org.codehaus.jackson:jackson-xc:1.9.2 - http://jackson.codehaus.org)
* Jettison (org.codehaus.jettison:jettison:1.1 - no url defined)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.0 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.7 - http://woodstox.codehaus.org)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* elasticsearch (org.elasticsearch:elasticsearch:1.4.0 - http://nexus.sonatype.org/oss-repository-hosting.html/elasticsearch)
* flyway-core (org.flywaydb:flyway-core:3.0 - http://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.1 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.1 - https://github.com/Gagravarr/VorbisJava)
* Javassist (org.javassist:javassist:3.16.1-GA - http://www.javassist.org/)
* Jetty Server (org.mortbay.jetty:jetty:6.1.14 - http://jetty.mortbay.org/project/modules/jetty)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.14 - http://jetty.mortbay.org/project/jetty-servlet-tester)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.14 - http://jetty.mortbay.org/project/jetty-util)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
* Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api)
* Noggit (org.noggit:noggit:0.5 - http://noggit.org)
* parboiled-core (org.parboiled:parboiled-core:1.1.6 - http://parboiled.org)
* parboiled-java (org.parboiled:parboiled-java:1.1.6 - http://parboiled.org)
* Restlet Core - API and Engine (org.restlet.jee:org.restlet:2.1.1 - http://www.restlet.org/org.restlet)
* Restlet Extension - Servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - http://www.restlet.org/org.restlet.ext.servlet)
* rome-modules (org.rometools:rome-modules:1.0 - http://www.rometools.org)
* spring-aop (org.springframework:spring-aop:3.1.1.RELEASE - no url defined)
* Spring AOP (org.springframework:spring-aop:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-asm (org.springframework:spring-asm:3.1.1.RELEASE - no url defined)
* spring-beans (org.springframework:spring-beans:3.1.1.RELEASE - no url defined)
* Spring Beans (org.springframework:spring-beans:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-context (org.springframework:spring-context:3.1.1.RELEASE - no url defined)
* Spring Context (org.springframework:spring-context:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-context-support (org.springframework:spring-context-support:3.1.1.RELEASE - no url defined)
* spring-core (org.springframework:spring-core:3.1.1.RELEASE - no url defined)
* Spring Core (org.springframework:spring-core:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-expression (org.springframework:spring-expression:3.1.1.RELEASE - no url defined)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-jdbc (org.springframework:spring-jdbc:3.1.1.RELEASE - no url defined)
* Spring Framework: Mock (org.springframework:spring-mock:2.0.8 - http://www.springframework.org)
* Spring TestContext Framework (org.springframework:spring-test:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-tx (org.springframework:spring-tx:3.1.1.RELEASE - no url defined)
* spring-web (org.springframework:spring-web:3.1.1.RELEASE - no url defined)
* Spring Web (org.springframework:spring-web:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* spring-webmvc (org.springframework:spring-webmvc:3.1.1.RELEASE - no url defined)
* Spring Web MVC (org.springframework:spring-webmvc:3.2.5.RELEASE - https://github.com/SpringSource/spring-framework)
* SWORD Java API, GUI and CLI (org.swordapp:sword-common:1.1 - http://nexus.sonatype.org/oss-repository-hosting.html/sword-common)
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
* oro (oro:oro:2.0.8 - no url defined)
* JUnitParams (pl.pragmatists:JUnitParams:1.0.2 - http://junitparams.googlecode.com)
* Rome A9 OpenSearch (rome:opensearch:0.1 - http://wiki.java.net/bin/view/Javawsxml/OpenSearch)
* ROME, RSS and atOM utilitiEs for Java (rome:rome:1.0 - https://rome.dev.java.net/)
* oai4j (se.kb:oai4j:0.6b1 - http://oai4j-client.sourceforge.net/)
* StAX API (stax:stax-api:1.0.1 - http://stax.codehaus.org/)
* standard (taglibs:standard:1.1.2 - no url defined)
* xalan (xalan:xalan:2.7.0 - no url defined)
* Xerces2 Java Parser (xerces:xercesImpl:2.8.1 - http://xerces.apache.org/xerces2-j/)
* xmlParserAPIs (xerces:xmlParserAPIs:2.6.2 - no url defined)
* XML Commons External Components XML APIs (xml-apis:xml-apis:1.0.b2 - http://xml.apache.org/commons/#external)
* xml-apis (xml-apis:xml-apis:1.3.02 - http://xml.apache.org/commons/#external)
* xmlParserAPIs (xml-apis:xmlParserAPIs:2.0.2 - no url defined)
* XML Commons Resolver Component (xml-resolver:xml-resolver:1.2 - http://xml.apache.org/commons/components/resolver/)
Common Public License v1.0 (http://www.opensource.org/licenses/cpl1.0)
* JUnit (junit:*) - http://junit.org/license
* WSDL4J (wsdl4j:*) - http://sourceforge.net/projects/wsdl4j/
BSD License:
Lesser GPL (http://www.opensource.org/licenses/LGPL-2.1)
* JExcelAPI (net.sourceforge.jexcelapi:*) - http://sourceforge.net/projects/jexcelapi/
* MaxMind GeoIP (org.dspace.dependencies:dspace-geoip) - http://geoip.cvs.sourceforge.net/viewvc/geoip/java/LICENSE
* METS Java Toolkit (org.dspace.mets:*) - http://hul.harvard.edu/mets/
* Text-mining (org.dspace.dependencies:dspace-tm-extractors) - http://code.google.com/p/text-mining/
* XOM (xom:*) - http://www.xom.nu/
* ASM Core (asm:asm:3.1 - http://asm.objectweb.org/asm/)
* XMP Library for Java (com.adobe.xmp:xmpcore:5.1.2 - http://www.adobe.com/devnet/xmp.html)
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.0 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
* Protocol Buffer Java API (com.google.protobuf:protobuf-java:2.5.0 - http://code.google.com/p/protobuf)
* Jena IRI (com.hp.hpl.jena:iri:0.8 - http://jena.sf.net/iri)
* Jena (com.hp.hpl.jena:jena:2.6.4 - http://www.openjena.org/)
* yui compressor (com.yahoo.platform.yui:yuicompressor:2.3.6 - http://developer.yahoo.com/yui/compressor/)
* dnsjava (dnsjava:dnsjava:2.1.1 - http://www.dnsjava.org)
* dom4j (dom4j:dom4j:1.6.1 - http://dom4j.org)
* Biblio Transformation Engine :: Core (gr.ekt.bte:bte-core:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-core)
* Biblio Transformation Engine :: Input/Output (gr.ekt.bte:bte-io:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-io)
* jaxen (jaxen:jaxen:1.1 - http://jaxen.codehaus.org/)
* ANTLR 3 Runtime (org.antlr:antlr-runtime:3.5 - http://www.antlr.org)
* Morfologik FSA (org.carrot2:morfologik-fsa:1.7.1 - http://morfologik.blogspot.com/morfologik-fsa/)
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace JSP-UI (org.dspace:dspace-jspui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.0-rc4-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* handle (org.dspace:handle:6.2 - no url defined)
* jargon (org.dspace:jargon:1.4.25 - no url defined)
* mets (org.dspace:mets:1.5.2 - no url defined)
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
* ASM Core (org.ow2.asm:asm:4.1 - http://asm.objectweb.org/asm/)
* ASM Core (org.ow2.asm:asm:4.2 - http://asm.objectweb.org/asm/)
* ASM Analysis (org.ow2.asm:asm-analysis:4.1 - http://asm.objectweb.org/asm-analysis/)
* ASM Commons (org.ow2.asm:asm-commons:4.1 - http://asm.objectweb.org/asm-commons/)
* ASM Tree (org.ow2.asm:asm-tree:4.1 - http://asm.objectweb.org/asm-tree/)
* ASM Util (org.ow2.asm:asm-util:4.1 - http://asm.objectweb.org/asm-util/)
* PostgreSQL JDBC Driver (postgresql:postgresql:9.1-901-1.jdbc4 - http://jdbc.postgresql.org)
* XMLUnit for Java (xmlunit:xmlunit:1.1 - http://xmlunit.sourceforge.net/)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
MIT / X11 License (or adaptations) (http://www.opensource.org/licenses/MIT)
* Bouncy Castle (org.bouncycastle:*) - http://www.bouncycastle.org/licence.html
* jmockit (org.dspace.dependencies.jmockit:dspace-jmockit) - http://code.google.com/p/jmockit/
* SLF4J (org.slf4j:*) - http://www.slf4j.org/license.html
Mozilla Public License (http://www.opensource.org/licenses/MPL-2.0)
* H2 database (com.h2database:*) - http://www.h2database.com/html/license.html
Common Development and Distribution License (CDDL):
New BSD License (http://opensource.org/licenses/bsd-license.php)
* Biblio-Transformation Engine (gr.ekt.bte:*) - http://github.com/EKT/Biblio-Transformation-Engine
Other Open Source Licenses:
* AOP Alliance (aopalliance:*) - Public Domain: http://aopalliance.sourceforge.net/
* coverity-escapers (com.coverity.security:*) - Modified BSD
* Handle API (org.dspace:handle) - Handle Public License Agreement: http://www.handle.net/HSj/hdlnet-2-LICENSE.pdf
* ICU4J (com.ibm.icu:icu4j) - ICU License : http://source.icu-project.org/repos/icu/icu/trunk/license.html
* JDOM (jdom:*) - JDOM License : https://github.com/hunterhacker/jdom/blob/master/LICENSE.txt
* OCLC Harvester2 (org.dspace:oclc-harvester2) - OCLC Research Public License: http://www.oclc.org/research/activities/software/license/v2final.html
* PostgreSQL (postgresql:*) - PostgreSQL License (BSD-based): http://www.postgresql.org/about/licence/
* Pull-parser / XPP3 (pull-parser:*, xpp3:*) - Indiana University Extreme! Lab Software License (BSD-based): http://www.extreme.indiana.edu/xgws/xsoap/xpp/download/PullParser2/LICENSE.txt
----
NOTE #1: Some individual web application files in DSpace (e.g. Javascript
libraries, CSS Frameworks) may have their own open source license. In that
scenario, we place a copy of the full text of the license alongside the
licensed files. You can locate these additional licenses in our codebase
by searching for files with a ".LICENSE" file extension.
* jersey-core (com.sun.jersey:jersey-core:1.17.1 - https://jersey.java.net/jersey-core/)
* jersey-json (com.sun.jersey:jersey-json:1.17.1 - https://jersey.java.net/jersey-json/)
* jersey-server (com.sun.jersey:jersey-server:1.17.1 - https://jersey.java.net/jersey-server/)
* jersey-servlet (com.sun.jersey:jersey-servlet:1.17.1 - https://jersey.java.net/jersey-servlet/)
* jersey-spring (com.sun.jersey.contribs:jersey-spring:1.8 - http://maven.apache.org)
* JAXB RI (com.sun.xml.bind:jaxb-impl:2.2.3-1 - http://jaxb.java.net/)
* JAXB Reference Implementation (com.sun.xml.bind:jaxb-impl:2.2.5 - http://jaxb.java.net/)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp)
* JavaMail API (javax.mail:mail:1.4 - https://glassfish.dev.java.net/javaee5/mail/)
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* jsp-api (javax.servlet:jsp-api:2.0 - no url defined)
* jstl (javax.servlet:jstl:1.1.2 - no url defined)
* servlet-api (javax.servlet:servlet-api:2.5 - no url defined)
* JAXB API bundle for GlassFish V3 (javax.xml.bind:jaxb-api:2.2.2 - https://jaxb.dev.java.net/)
* Streaming API for XML (javax.xml.stream:stax-api:1.0-2 - no url defined)
* Servlet Specification 2.5 API (org.mortbay.jetty:servlet-api-2.5:6.1.14 - http://jetty.mortbay.org/project/modules/servlet-api-2.5)
* Restlet Core - API and Engine (org.restlet.jee:org.restlet:2.1.1 - http://www.restlet.org/org.restlet)
* Restlet Extension - Servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - http://www.restlet.org/org.restlet.ext.servlet)
For example, on Linux you can use the 'find' command from the source directory:
Eclipse Public License:
find . -type f -name "*.LICENSE"
----
* JUnit (junit:junit:4.11 - http://junit.org)
* AspectJ runtime (org.aspectj:aspectjrt:1.6.11 - http://www.aspectj.org)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
* Restlet Core - API and Engine (org.restlet.jee:org.restlet:2.1.1 - http://www.restlet.org/org.restlet)
* Restlet Extension - Servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - http://www.restlet.org/org.restlet.ext.servlet)
----
NOTE #2: Although we try to keep this libraries list current, the latest
information about DSpace third party libraries can be found by running the
following Maven command(s):
GNU General Public Library:
mvn project-info-reports:dependencies
* Streaming API for XML (javax.xml.stream:stax-api:1.0-2 - no url defined)
This generates a "[project]/target/site/dependencies.html" report under every
DSpace project directory. This report lists all dependencies and their license
(if it can be determined by Maven).
GNU Lesser General Public License (LGPL):
Additionally, you may wish to run:
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.3.0 - http://wiki.fasterxml.com/JacksonHome)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.0 - http://findbugs.sourceforge.net/)
* MaxMind GeoIP API (com.maxmind.geoip:geoip-api:1.2.11 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* JAX-RS provider for JSON content type (org.codehaus.jackson:jackson-jaxrs:1.9.2 - http://jackson.codehaus.org)
* Xml Compatibility extensions for Jackson (org.codehaus.jackson:jackson-xc:1.9.2 - http://jackson.codehaus.org)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* Javassist (org.javassist:javassist:3.16.1-GA - http://www.javassist.org/)
* org.jdesktop - Swing Worker (org.jdesktop:swing-worker:1.1 - no url defined)
* Restlet Core - API and Engine (org.restlet.jee:org.restlet:2.1.1 - http://www.restlet.org/org.restlet)
* Restlet Extension - Servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - http://www.restlet.org/org.restlet.ext.servlet)
* xom (xom:xom:1.1 - http://www.xom.nu)
mvn project-info-reports:dependency-convergence
ICU License:
This generates a summary report at
"[dspace]/target/site/dependency-convergence.html" which lists all dependencies
of all DSpace projects (though it does not list license information)
* ICU4J (com.ibm.icu:icu4j:51.1 - http://icu-project.org/)
For more information see the maven-project-info-reports-plugin:
http://maven.apache.org/plugins/maven-project-info-reports-plugin/
----
JDOM License:
* jdom (jdom:jdom:1.0 - no url defined)
MIT License:
* Bouncy Castle CMS and S/MIME API (org.bouncycastle:bcmail-jdk15:1.44 - http://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15:1.44 - http://www.bouncycastle.org/java.html)
* Main (org.jmockit:jmockit:1.10 - http://www.jmockit.org)
* OpenCloud (org.mcavallo:opencloud:0.3 - http://opencloud.mcavallo.org/)
* Mockito (org.mockito:mockito-all:1.9.5 - http://www.mockito.org)
* Mockito (org.mockito:mockito-core:1.9.5 - http://www.mockito.org)
* Objenesis (org.objenesis:objenesis:1.0 - http://objenesis.googlecode.com/svn/docs/index.html)
* JCL 1.1.1 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.6.1 - http://www.slf4j.org)
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.6.1 - http://www.slf4j.org)
* SLF4J API Module (org.slf4j:slf4j-api:1.6.1 - http://www.slf4j.org)
* SLF4J JDK14 Binding (org.slf4j:slf4j-jdk14:1.6.1 - http://www.slf4j.org)
* SLF4J LOG4J-12 Binding (org.slf4j:slf4j-log4j12:1.6.1 - http://www.slf4j.org)
Mozilla Public License:
* juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/)
* H2 Database Engine (com.h2database:h2:1.4.180 - http://www.h2database.com)
* Javassist (org.javassist:javassist:3.16.1-GA - http://www.javassist.org/)
* Rhino (rhino:js:1.6R7 - http://www.mozilla.org/rhino/)
Public Domain:
* AOP alliance (aopalliance:aopalliance:1.0 - http://aopalliance.sourceforge.net)
* Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined)
* Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
* XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)
Unknown license:
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)

2
README
View File

@@ -51,4 +51,4 @@ places to seek help, news articles and lists of other users, please see:
DSpace source code licensing information available online at:
- http://www.dspace.org/license/
Copyright (c) 2002-2014, DuraSpace. All rights reserved.
Copyright (c) 2002-2015, DuraSpace. All rights reserved.

View File

@@ -56,25 +56,21 @@ default.language = en_US
# Uncomment the appropriate block below for your database.
# postgres
db.driver=org.postgresql.Driver
db.dialect=org.dspace.storage.rdbms.hibernate.postgres.DSpacePostgreSQL82Dialect
db.url=jdbc:postgresql://localhost:5432/dspace
db.username=dspace
db.password=dspace
db.schema = public
# oracle
#db.driver= oracle.jdbc.OracleDriver
#db.dialect=org.hibernate.dialect.Oracle10gDialect
#db.url=jdbc:oracle:thin:@//localhost:1521/xe
#db.username=dspace
#db.password=dspace
# The schema in oracle is usually the user name
#db.schema = dspace
# Schema name - if your database contains multiple schemas, you can avoid problems with
# retrieving the definitions of duplicate object names by specifying
# the schema name here that is used for DSpace by uncommenting the following entry
# NOTE: this configuration option is for PostgreSQL only. For Oracle, schema is equivalent
# to user name. DSpace depends on the PostgreSQL understanding of schema. If you are using
Oracle, just leave this value blank.
db.schema =
# Maximum number of DB connections in pool
db.maxconnections = 30
@@ -85,12 +81,6 @@ db.maxwait = 5000
# Maximum number of idle connections in pool (-1 = unlimited)
db.maxidle = -1
# Determine if prepared statement should be cached. (default is true)
db.statementpool = true
# Specify a name for the connection pool (useful if you have multiple applications sharing Tomcat's dbcp)
# If not specified, defaults to 'dspacepool'
db.poolname = dspacepool
#######################
# EMAIL CONFIGURATION #

View File

@@ -12,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>5.0-rc3-SNAPSHOT</version>
<version>6.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
@@ -89,7 +89,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>1.9</version>
<version>1.9.1</version>
<executions>
<execution>
<phase>validate</phase>
@@ -103,7 +103,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId>
<version>1.3</version>
<version>1.4</version>
<executions>
<execution>
<phase>validate</phase>
@@ -113,6 +113,7 @@
</execution>
</executions>
</plugin>
</plugins>
</build>
@@ -136,6 +137,7 @@
</plugins>
</build>
</profile>
<!-- If Unit Testing is enabled, then setup the Unit Test Environment.
See also the 'skiptests' profile in Parent POM. -->
<profile>
@@ -156,6 +158,18 @@
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<version>2.8</version>
<configuration>
<outputDirectory>${project.build.directory}/testing</outputDirectory>
<artifactItems>
<artifactItem>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>${project.version}</version>
<type>zip</type>
<classifier>testEnvironment</classifier>
</artifactItem>
</artifactItems>
</configuration>
<executions>
<execution>
<id>setupTestEnvironment</id>
@@ -163,18 +177,13 @@
<goals>
<goal>unpack</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/testing</outputDirectory>
<artifactItems>
<artifactItem>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>${project.version}</version>
<type>zip</type>
<classifier>testEnvironment</classifier>
</artifactItem>
</artifactItems>
</configuration>
</execution>
<execution>
<id>setupIntegrationTestEnvironment</id>
<phase>pre-integration-test</phase>
<goals>
<goal>unpack</goal>
</goals>
</execution>
</executions>
</plugin>
@@ -196,7 +205,7 @@
<executions>
<execution>
<id>setproperty</id>
<phase>generate-test-resources</phase>
<phase>generate-test-resources</phase> <!-- XXX I think this should be 'initialize' - MHW -->
<goals>
<goal>execute</goal>
</goals>
@@ -259,7 +268,16 @@
<phase>process-test-resources</phase>
<configuration>
<target>
<!-- Ant task to copy dspace.cfg.woven to location of test dspace.cfg file -->
<copy file="${agnostic.build.dir}/testing/dspace.cfg.woven" tofile="${agnostic.build.dir}/testing/dspace/config/dspace.cfg" />
<!-- Now, do one final filter of our Test configs, replacing any remaining "${dspace.dir}"
placeholders, with the full path of our Unit Test directory -->
<!-- NOTE: This final filtering is necessary, because dspace.dir doesn't get filled out
in our test dspace.cfg until Fileweaver runs above. -->
<replace dir="${agnostic.build.dir}/testing/dspace/config/" value="${agnostic.build.dir}/testing/dspace">
<include name="**/*"/>
<replacetoken>${dspace.dir}</replacetoken>
</replace>
</target>
</configuration>
<goals>
@@ -269,7 +287,7 @@
</executions>
</plugin>
<!-- Run Unit/Integration Testing! This plugin just kicks off the tests (when enabled). -->
<!-- Run Unit Testing! This plugin just kicks off the tests (when enabled). -->
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
@@ -281,6 +299,19 @@
</systemPropertyVariables>
</configuration>
</plugin>
<!-- Run Integration Testing! This plugin just kicks off the tests (when enabled). -->
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<systemPropertyVariables>
<!-- Specify the dspace.cfg file to use for test environment -->
<dspace.configuration>${agnostic.build.dir}/testing/dspace/config/dspace.cfg</dspace.configuration>
<!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
@@ -289,6 +320,18 @@
<dependencies>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>handle</artifactId>
@@ -335,8 +378,8 @@
<artifactId>commons-collections</artifactId>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
@@ -352,8 +395,8 @@
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-pool2</artifactId>
</dependency>
<dependency>
<groupId>commons-validator</groupId>
@@ -569,7 +612,7 @@
<dependency>
<groupId>postgresql</groupId>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
</dependency>
@@ -595,7 +638,7 @@
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
<version>3.0</version>
<version>3.2.1</version>
</dependency>
<!-- Google Analytics -->
@@ -603,13 +646,21 @@
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-analytics</artifactId>
</dependency>
<dependency>
<groupId>com.google.api-client</groupId>
<artifactId>google-api-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client-jackson2</artifactId>
</dependency>
<dependency>
<groupId>com.google.oauth-client</groupId>
<artifactId>google-oauth-client-jetty</artifactId>
<artifactId>google-oauth-client</artifactId>
</dependency>
<!-- FindBugs -->
<dependency>
@@ -625,6 +676,34 @@
<artifactId>joda-time</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<version>1</version>
<type>jar</type>
</dependency>
<!-- S3 -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.10.26</version>
</dependency>
<!-- S3 also wanted jackson... -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.5.3</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.5.3</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.5.3</version>
</dependency>
</dependencies>
</project>

View File

@@ -9,6 +9,8 @@ package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -17,10 +19,12 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* A command-line tool for setting/removing community/sub-community
@@ -32,6 +36,15 @@ import org.dspace.storage.rdbms.DatabaseManager;
public class CommunityFiliator
{
protected CommunityService communityService;
protected HandleService handleService;
public CommunityFiliator() {
communityService = ContentServiceFactory.getInstance().getCommunityService();
handleService = HandleServiceFactory.getInstance().getHandleService();
}
public static void main(String[] argv) throws Exception
{
// create an options object and populate it
@@ -166,7 +179,7 @@ public class CommunityFiliator
// check that a valid filiation would be established
// first test - proposed child must currently be an orphan (i.e.
// top-level)
Community childDad = child.getParentCommunity();
Community childDad = child.getParentCommunities() != null ? child.getParentCommunities().iterator().next() : null;
if (childDad != null)
{
@@ -177,11 +190,11 @@ public class CommunityFiliator
// second test - circularity: parent's parents can't include proposed
// child
Community[] parentDads = parent.getAllParents();
List<Community> parentDads = parent.getParentCommunities();
for (int i = 0; i < parentDads.length; i++)
for (int i = 0; i < parentDads.size(); i++)
{
if (parentDads[i].getID() == child.getID())
if (parentDads.get(i).getID().equals(child.getID()))
{
System.out
.println("Error, circular parentage - child is parent of parent");
@@ -190,7 +203,7 @@ public class CommunityFiliator
}
// everything's OK
parent.addSubcommunity(child);
communityService.addSubcommunity(c, parent, child);
// complete the pending transaction
c.complete();
@@ -202,12 +215,12 @@ public class CommunityFiliator
throws SQLException, AuthorizeException, IOException
{
// verify that child is indeed a child of parent
Community[] parentKids = parent.getSubcommunities();
List<Community> parentKids = parent.getSubcommunities();
boolean isChild = false;
for (int i = 0; i < parentKids.length; i++)
for (int i = 0; i < parentKids.size(); i++)
{
if (parentKids[i].getID() == child.getID())
if (parentKids.get(i).getID().equals(child.getID()))
{
isChild = true;
@@ -224,9 +237,10 @@ public class CommunityFiliator
// OK remove the mappings - but leave the community, which will become
// top-level
DatabaseManager.updateQuery(c,
"DELETE FROM community2community WHERE parent_comm_id= ? "+
"AND child_comm_id= ? ", parent.getID(), child.getID());
child.getParentCommunities().remove(parent);
parent.getSubcommunities().remove(child);
communityService.update(c, child);
communityService.update(c, parent);
// complete the pending transaction
c.complete();
@@ -235,7 +249,7 @@ public class CommunityFiliator
+ "'");
}
private Community resolveCommunity(Context c, String communityID)
protected Community resolveCommunity(Context c, String communityID)
throws SQLException
{
Community community = null;
@@ -243,7 +257,7 @@ public class CommunityFiliator
if (communityID.indexOf('/') != -1)
{
// has a / must be a handle
community = (Community) HandleManager.resolveToObject(c,
community = (Community) handleService.resolveToObject(c,
communityID);
// ensure it's a community
@@ -255,7 +269,7 @@ public class CommunityFiliator
}
else
{
community = Community.find(c, Integer.parseInt(communityID));
community = communityService.find(c, UUID.fromString(communityID));
}
return community;

View File

@@ -22,6 +22,9 @@ import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
/**
* A command-line tool for creating an initial administrator for setting up a
@@ -46,7 +49,10 @@ public final class CreateAdministrator
{
/** DSpace Context object */
private final Context context;
protected EPersonService ePersonService;
protected GroupService groupService;
/**
* For invoking via the command line. If called with no command line arguments,
* it will negotiate with the user for the administrator details
@@ -88,10 +94,12 @@ public final class CreateAdministrator
*
* @throws Exception
*/
private CreateAdministrator()
protected CreateAdministrator()
throws Exception
{
context = new Context();
groupService = EPersonServiceFactory.getInstance().getGroupService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
}
/**
@@ -100,7 +108,7 @@ public final class CreateAdministrator
*
* @throws Exception
*/
private void negotiateAdministratorDetails()
protected void negotiateAdministratorDetails()
throws Exception
{
Console console = System.console();
@@ -222,7 +230,7 @@ public final class CreateAdministrator
*
* @throws Exception
*/
private void createAdministrator(String email, String first, String last,
protected void createAdministrator(String email, String first, String last,
String language, String pw)
throws Exception
{
@@ -231,7 +239,7 @@ public final class CreateAdministrator
context.setIgnoreAuthorization(true);
// Find administrator group
Group admins = Group.find(context, 1);
Group admins = groupService.findByName(context, Group.ADMIN);
if (admins == null)
{
@@ -239,27 +247,27 @@ public final class CreateAdministrator
}
// Create the administrator e-person
EPerson eperson = EPerson.findByEmail(context,email);
EPerson eperson = ePersonService.findByEmail(context,email);
// check if the email belongs to a registered user,
// if not create a new user with this email
if (eperson == null)
{
eperson = EPerson.create(context);
eperson = ePersonService.create(context);
eperson.setEmail(email);
eperson.setCanLogIn(true);
eperson.setRequireCertificate(false);
eperson.setSelfRegistered(false);
}
eperson.setLastName(last);
eperson.setFirstName(first);
eperson.setLanguage(language);
eperson.setPassword(pw);
eperson.update();
eperson.setLastName(context, last);
eperson.setFirstName(context, first);
eperson.setLanguage(context, language);
ePersonService.setPassword(eperson, pw);
ePersonService.update(context, eperson);
admins.addMember(eperson);
admins.update();
groupService.addMember(context, admins, eperson);
groupService.update(context, admins);
context.complete();

View File

@@ -1,287 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.core.Context;
/**
* Class representing a particular Dublin Core metadata type, with various
* utility methods. In general, only used for manipulating the registry of
* Dublin Core types in the system, so most users will not need this.
*
* <p>
* The DCType implementation has been deprecated, please use MetadataManager,
* MetadataSchema and MetadataField instead. For backward compatibility this
* implementation has been updated to transparently call the new classes.
* </p>
*
* @author Robert Tansley
* @author Martin Hald
* @version $Revision$
* @deprecated
*/
public class DCType
{
/** Our context */
private Context ourContext;
/** The matching metadata field */
private MetadataField field = new MetadataField();
/**
* Create a DCType from an existing metadata field.
*
* @param context
* @param field
* @deprecated
*/
public DCType(Context context, MetadataField field)
{
this.ourContext = context;
this.field = field;
}
/**
* Default constructor.
*
* @param context
* @deprecated
*/
public DCType(Context context)
{
this.ourContext = context;
}
/**
* Utility method for quick access to an element and qualifier given the
* type ID.
*
* @param context
* context, in case DC types need to be read in from DB
* @param id
* the DC type ID
* @return a two-String array, string 0 is the element, string 1 is the
* qualifier
* @deprecated
*/
public static String[] quickFind(Context context, int id)
throws SQLException
{
MetadataField field = MetadataField.find(context, id);
String[] result = new String[2];
if (field == null)
{
return result;
}
else
{
result[0] = field.getElement();
result[1] = field.getQualifier();
return result;
}
}
/**
* Get a metadata field from the database.
*
* @param context
* DSpace context object
* @param id
* ID of the dublin core type
*
* @return the metadata field, or null if the ID is invalid.
* @deprecated
*/
public static DCType find(Context context, int id) throws SQLException
{
MetadataField field = MetadataField.find(context, id);
return new DCType(context, field);
}
/**
* Find a given Dublin Core type. Returns <code>null</code> if the Dublin
* Core type doesn't exist.
*
* @param context
* the DSpace context to use
* @param element
* the element to find
* @param qualifier
* the qualifier, or <code>null</code> to find an unqualified
* type
*
* @return the Dublin Core type, or <code>null</code> if there isn't a
* corresponding type in the registry
* @throws AuthorizeException
* @deprecated
*/
public static DCType findByElement(Context context, String element,
String qualifier) throws SQLException, AuthorizeException
{
MetadataField field = MetadataField.findByElement(context,
MetadataSchema.DC_SCHEMA_ID, element, qualifier);
if (field == null)
{
return null;
}
else
{
return new DCType(context, field);
}
}
/**
* Retrieve all Dublin Core types from the registry
*
* @return an array of all the Dublin Core types
* @deprecated
*/
public static DCType[] findAll(Context context) throws SQLException
{
MetadataField field[] = MetadataField.findAll(context);
DCType[] typeArray = new DCType[field.length];
for (int ii = 0; ii < field.length; ii++)
{
typeArray[ii] = new DCType(context, field[ii]);
}
// Return the array
return typeArray;
}
/**
* Create a new Dublin Core type
*
* @param context
* DSpace context object
* @return the newly created DCType
* @throws NonUniqueMetadataException
* @throws IOException
* @deprecated
*/
public static DCType create(Context context) throws SQLException,
AuthorizeException, IOException, NonUniqueMetadataException
{
MetadataField field = new MetadataField();
field.setSchemaID(MetadataSchema.DC_SCHEMA_ID);
field.create(context);
return new DCType(context, field);
}
/**
* Delete this DC type. This won't work if there are any DC values in the
* database of this type - they need to be updated first. An
* <code>SQLException</code> (referential integrity violation) will be
* thrown in this case.
* @deprecated
*/
public void delete() throws SQLException, AuthorizeException
{
field.delete(ourContext);
}
/**
* Get the internal identifier of this metadata field
*
* @return the internal identifier
*/
public int getID()
{
return field.getFieldID();
}
/**
* Get the DC element
*
* @return the element
*/
public String getElement()
{
return field.getElement();
}
/**
* Set the DC element
*
* @param s
* the new element
*/
public void setElement(String s)
{
field.setElement(s);
}
/**
* Get the DC qualifier, if any.
*
* @return the DC qualifier, or <code>null</code> if this is an
* unqualified element
*/
public String getQualifier()
{
return field.getQualifier();
}
/**
* Set the DC qualifier
*
* @param s
* the DC qualifier, or <code>null</code> if this is an
* unqualified element
*/
public void setQualifier(String s)
{
field.setQualifier(s);
}
/**
* Get the scope note - information about the DC type and its use
*
* @return the scope note
*/
public String getScopeNote()
{
return field.getScopeNote();
}
/**
* Set the scope note
*
* @param s
* the new scope note
*/
public void setScopeNote(String s)
{
field.setScopeNote(s);
}
/**
* Update the dublin core registry
*
* @throws IOException
* @throws NonUniqueMetadataException
* @deprecated
*/
public void update() throws SQLException, AuthorizeException,
NonUniqueMetadataException, IOException
{
field.update(ourContext);
}
}

View File

@@ -12,6 +12,7 @@ import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
@@ -24,6 +25,9 @@ import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.xml.sax.SAXException;
@@ -46,6 +50,9 @@ import org.xml.sax.SAXException;
public class MetadataExporter
{
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/**
* @param args
* @throws ParseException
@@ -102,25 +109,25 @@ public class MetadataExporter
// Save the schema definition(s)
saveSchema(context, xmlSerializer, schema);
MetadataField[] mdFields = null;
List<MetadataField> mdFields = null;
// If a single schema has been specified
if (schema != null && !"".equals(schema))
{
// Get the id of that schema
MetadataSchema mdSchema = MetadataSchema.find(context, schema);
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
if (mdSchema == null)
{
throw new RegistryExportException("no schema to export");
}
// Get the metadata fields only for the specified schema
mdFields = MetadataField.findAllInSchema(context, mdSchema.getSchemaID());
mdFields = metadataFieldService.findAllInSchema(context, mdSchema);
}
else
{
// Get the metadata fields for all the schemas
mdFields = MetadataField.findAll(context);
mdFields = metadataFieldService.findAll(context);
}
// Output the metadata fields
@@ -150,14 +157,14 @@ public class MetadataExporter
if (schema != null && !"".equals(schema))
{
// Find a single named schema
MetadataSchema mdSchema = MetadataSchema.find(context, schema);
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
saveSchema(xmlSerializer, mdSchema);
}
else
{
// Find all schemas
MetadataSchema[] mdSchemas = MetadataSchema.findAll(context);
List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);
for (MetadataSchema mdSchema : mdSchemas)
{
@@ -292,16 +299,16 @@ public class MetadataExporter
private static String getSchemaName(Context context, MetadataField mdField) throws SQLException, RegistryExportException
{
// Get name from cache
String name = schemaMap.get(Integer.valueOf(mdField.getSchemaID()));
String name = schemaMap.get(mdField.getMetadataSchema().getSchemaID());
if (name == null)
{
// Name not retrieved before, so get the schema now
MetadataSchema mdSchema = MetadataSchema.find(context, mdField.getSchemaID());
MetadataSchema mdSchema = metadataSchemaService.find(context, mdField.getMetadataSchema().getSchemaID());
if (mdSchema != null)
{
name = mdSchema.getName();
schemaMap.put(Integer.valueOf(mdSchema.getSchemaID()), name);
schemaMap.put(mdSchema.getSchemaID(), name);
}
else
{

View File

@@ -25,6 +25,9 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,6 +61,9 @@ import org.xml.sax.SAXException;
*/
public class MetadataImporter
{
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/** logging category */
private static final Logger log = LoggerFactory.getLogger(MetadataImporter.class);
@@ -173,14 +179,13 @@ public class MetadataImporter
}
// check to see if the schema already exists
MetadataSchema s = MetadataSchema.find(context, name);
MetadataSchema s = metadataSchemaService.find(context, name);
if (s == null)
{
// Schema does not exist - create
log.info("Registering Schema " + name + " (" + namespace + ")");
MetadataSchema schema = new MetadataSchema(namespace, name);
schema.create(context);
metadataSchemaService.create(context, name, namespace);
}
else
{
@@ -197,7 +202,7 @@ public class MetadataImporter
// Update the existing schema namespace and continue to type import
log.info("Updating Schema " + name + ": New namespace " + namespace);
s.setNamespace(namespace);
s.update(context);
metadataSchemaService.update(context, s);
}
else
{
@@ -236,14 +241,14 @@ public class MetadataImporter
// Find the matching schema object
MetadataSchema schemaObj = MetadataSchema.find(context, schema);
MetadataSchema schemaObj = metadataSchemaService.find(context, schema);
if (schemaObj == null)
{
throw new RegistryImportException("Schema '" + schema + "' is not registered and does not exist.");
}
MetadataField mf = MetadataField.findByElement(context, schemaObj.getSchemaID(), element, qualifier);
MetadataField mf = metadataFieldService.findByElement(context, schemaObj, element, qualifier);
if (mf != null)
{
// Metadata field already exists, skipping it
@@ -255,12 +260,8 @@ public class MetadataImporter
if(qualifier==null)
fieldName = schema + "." + element;
log.info("Registering metadata field " + fieldName);
MetadataField field = new MetadataField();
field.setSchemaID(schemaObj.getSchemaID());
field.setElement(element);
field.setQualifier(qualifier);
field.setScopeNote(scopeNote);
field.create(context);
MetadataField field = metadataFieldService.create(context, schemaObj, element, qualifier, scopeNote);
metadataFieldService.update(context, field);
}
/**

View File

@@ -10,6 +10,8 @@ package org.dspace.administer;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@@ -20,9 +22,8 @@ import org.apache.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.w3c.dom.Document;
@@ -48,6 +49,8 @@ public class RegistryLoader
/** log4j category */
private static Logger log = Logger.getLogger(RegistryLoader.class);
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService();
/**
* For invoking via the command line
*
@@ -167,30 +170,32 @@ public class RegistryLoader
String[] extensions = getRepeatedElementData(node, "extension");
// Check if this format already exists in our registry (by mime type)
BitstreamFormat exists = BitstreamFormat.findByMIMEType(context, mimeType);
BitstreamFormat exists = bitstreamFormatService.findByMIMEType(context, mimeType);
// If not found by mimeType, check by short description (since this must also be unique)
if(exists==null)
{
exists = BitstreamFormat.findByShortDescription(context, shortDesc);
exists = bitstreamFormatService.findByShortDescription(context, shortDesc);
}
// If it doesn't exist, create it..otherwise skip it.
if(exists==null)
{
// Create the format object
BitstreamFormat format = BitstreamFormat.create(context);
BitstreamFormat format = bitstreamFormatService.create(context);
// Fill it out with the values
format.setMIMEType(mimeType);
format.setShortDescription(shortDesc);
bitstreamFormatService.setShortDescription(context, format, shortDesc);
format.setDescription(desc);
format.setSupportLevel(supportLevel);
format.setInternal(internal);
format.setExtensions(extensions);
ArrayList<String> extensionList = new ArrayList<>();
extensionList.addAll(Arrays.asList(extensions));
format.setExtensions(extensionList);
// Write to database
format.update();
bitstreamFormatService.update(context, format);
}
}

View File

@@ -28,8 +28,12 @@ import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.jdom.Element;
import org.jdom.output.XMLOutputter;
import org.w3c.dom.Document;
@@ -73,7 +77,11 @@ public class StructBuilder
/** a hashtable to hold metadata for the community being worked on */
private static Map<String, String> communityMap = new HashMap<String, String>();
protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
protected static EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
/**
* Main method to be run from the command line to import a structure into
* DSpace
@@ -127,7 +135,7 @@ public class StructBuilder
Context context = new Context();
// set the context
context.setCurrentUser(EPerson.findByEmail(context, eperson));
context.setCurrentUser(ePersonService.findByEmail(context, eperson));
// load the XML
Document document = loadXML(file);
@@ -390,15 +398,15 @@ public class StructBuilder
// create the community or sub community
if (parent != null)
{
community = parent.createSubcommunity();
community = communityService.create(parent, context);
}
else
{
community = Community.create(null, context);
community = communityService.create(null, context);
}
// default the short description to be an empty string
community.setMetadata("short_description", " ");
communityService.setMetadata(context, community, "short_description", " ");
// now update the metadata
Node tn = communities.item(i);
@@ -407,7 +415,7 @@ public class StructBuilder
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
if (nl.getLength() == 1)
{
community.setMetadata(entry.getValue(), getStringValue(nl.item(0)));
communityService.setMetadata(context, community, entry.getValue(), getStringValue(nl.item(0)));
}
}
@@ -420,7 +428,7 @@ public class StructBuilder
// difficult
// to isolate the community that already exists without hitting
// the database directly.
community.update();
communityService.update(context, community);
// build the element with the handle that identifies the new
// community
@@ -433,34 +441,34 @@ public class StructBuilder
element.setAttribute("identifier", community.getHandle());
Element nameElement = new Element("name");
nameElement.setText(community.getMetadata("name"));
nameElement.setText(communityService.getMetadata(community, "name"));
element.addContent(nameElement);
if (community.getMetadata("short_description") != null)
if (communityService.getMetadata(community, "short_description") != null)
{
Element descriptionElement = new Element("description");
descriptionElement.setText(community.getMetadata("short_description"));
descriptionElement.setText(communityService.getMetadata(community, "short_description"));
element.addContent(descriptionElement);
}
if (community.getMetadata("introductory_text") != null)
if (communityService.getMetadata(community, "introductory_text") != null)
{
Element introElement = new Element("intro");
introElement.setText(community.getMetadata("introductory_text"));
introElement.setText(communityService.getMetadata(community, "introductory_text"));
element.addContent(introElement);
}
if (community.getMetadata("copyright_text") != null)
if (communityService.getMetadata(community, "copyright_text") != null)
{
Element copyrightElement = new Element("copyright");
copyrightElement.setText(community.getMetadata("copyright_text"));
copyrightElement.setText(communityService.getMetadata(community, "copyright_text"));
element.addContent(copyrightElement);
}
if (community.getMetadata("side_bar_text") != null)
if (communityService.getMetadata(community, "side_bar_text") != null)
{
Element sidebarElement = new Element("sidebar");
sidebarElement.setText(community.getMetadata("side_bar_text"));
sidebarElement.setText(communityService.getMetadata(community, "side_bar_text"));
element.addContent(sidebarElement);
}
@@ -506,10 +514,10 @@ public class StructBuilder
for (int i = 0; i < collections.getLength(); i++)
{
Element element = new Element("collection");
Collection collection = parent.createCollection();
Collection collection = collectionService.create(context, parent);
// default the short description to the empty string
collection.setMetadata("short_description", " ");
collectionService.setMetadata(context, collection, "short_description", " ");
// import the rest of the metadata
Node tn = collections.item(i);
@@ -518,57 +526,57 @@ public class StructBuilder
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
if (nl.getLength() == 1)
{
collection.setMetadata(entry.getValue(), getStringValue(nl.item(0)));
collectionService.setMetadata(context, collection, entry.getValue(), getStringValue(nl.item(0)));
}
}
collection.update();
collectionService.update(context, collection);
element.setAttribute("identifier", collection.getHandle());
Element nameElement = new Element("name");
nameElement.setText(collection.getMetadata("name"));
nameElement.setText(collectionService.getMetadata(collection, "name"));
element.addContent(nameElement);
if (collection.getMetadata("short_description") != null)
if (collectionService.getMetadata(collection, "short_description") != null)
{
Element descriptionElement = new Element("description");
descriptionElement.setText(collection.getMetadata("short_description"));
descriptionElement.setText(collectionService.getMetadata(collection, "short_description"));
element.addContent(descriptionElement);
}
if (collection.getMetadata("introductory_text") != null)
if (collectionService.getMetadata(collection, "introductory_text") != null)
{
Element introElement = new Element("intro");
introElement.setText(collection.getMetadata("introductory_text"));
introElement.setText(collectionService.getMetadata(collection, "introductory_text"));
element.addContent(introElement);
}
if (collection.getMetadata("copyright_text") != null)
if (collectionService.getMetadata(collection, "copyright_text") != null)
{
Element copyrightElement = new Element("copyright");
copyrightElement.setText(collection.getMetadata("copyright_text"));
copyrightElement.setText(collectionService.getMetadata(collection, "copyright_text"));
element.addContent(copyrightElement);
}
if (collection.getMetadata("side_bar_text") != null)
if (collectionService.getMetadata(collection, "side_bar_text") != null)
{
Element sidebarElement = new Element("sidebar");
sidebarElement.setText(collection.getMetadata("side_bar_text"));
sidebarElement.setText(collectionService.getMetadata(collection, "side_bar_text"));
element.addContent(sidebarElement);
}
if (collection.getMetadata("license") != null)
if (collectionService.getMetadata(collection, "license") != null)
{
Element sidebarElement = new Element("license");
sidebarElement.setText(collection.getMetadata("license"));
sidebarElement.setText(collectionService.getMetadata(collection, "license"));
element.addContent(sidebarElement);
}
if (collection.getMetadata("provenance_description") != null)
if (collectionService.getMetadata(collection, "provenance_description") != null)
{
Element sidebarElement = new Element("provenance");
sidebarElement.setText(collection.getMetadata("provenance_description"));
sidebarElement.setText(collectionService.getMetadata(collection, "provenance_description"));
element.addContent(sidebarElement);
}

View File

@@ -8,7 +8,6 @@
package org.dspace.app.bulkedit;
import org.dspace.content.Item;
import org.dspace.content.Metadatum;
import org.dspace.content.Collection;
import java.util.ArrayList;
@@ -25,16 +24,16 @@ public class BulkEditChange
private Item item;
/** The List of hashtables with the new elements */
private List<Metadatum> adds;
private List<BulkEditMetadataValue> adds;
/** The List of hashtables with the removed elements */
private List<Metadatum> removes;
private List<BulkEditMetadataValue> removes;
/** The List of hashtables with the unchanged elements */
private List<Metadatum> constant;
private List<BulkEditMetadataValue> constant;
/** The List of the complete set of new values (constant + adds) */
private List<Metadatum> complete;
private List<BulkEditMetadataValue> complete;
/** The list of old collections the item used to be mapped to */
private List<Collection> oldMappedCollections;
@@ -77,12 +76,12 @@ public class BulkEditChange
newOwningCollection = null;
// Initialise the arrays
adds = new ArrayList<Metadatum>();
removes = new ArrayList<Metadatum>();
constant = new ArrayList<Metadatum>();
complete = new ArrayList<Metadatum>();
oldMappedCollections = new ArrayList<Collection>();
newMappedCollections = new ArrayList<Collection>();
adds = new ArrayList<>();
removes = new ArrayList<>();
constant = new ArrayList<>();
complete = new ArrayList<>();
oldMappedCollections = new ArrayList<>();
newMappedCollections = new ArrayList<>();
}
/**
@@ -98,12 +97,12 @@ public class BulkEditChange
empty = true;
// Initialise the arrays
adds = new ArrayList<Metadatum>();
removes = new ArrayList<Metadatum>();
constant = new ArrayList<Metadatum>();
complete = new ArrayList<Metadatum>();
oldMappedCollections = new ArrayList<Collection>();
newMappedCollections = new ArrayList<Collection>();
adds = new ArrayList<>();
removes = new ArrayList<>();
constant = new ArrayList<>();
complete = new ArrayList<>();
oldMappedCollections = new ArrayList<>();
newMappedCollections = new ArrayList<>();
}
/**
@@ -122,7 +121,7 @@ public class BulkEditChange
*
* @param dcv The value to add
*/
public void registerAdd(Metadatum dcv)
public void registerAdd(BulkEditMetadataValue dcv)
{
// Add the added value
adds.add(dcv);
@@ -135,7 +134,7 @@ public class BulkEditChange
*
* @param dcv The value to remove
*/
public void registerRemove(Metadatum dcv)
public void registerRemove(BulkEditMetadataValue dcv)
{
// Add the removed value
removes.add(dcv);
@@ -147,7 +146,7 @@ public class BulkEditChange
*
* @param dcv The value to keep unchanged
*/
public void registerConstant(Metadatum dcv)
public void registerConstant(BulkEditMetadataValue dcv)
{
// Add the removed value
constant.add(dcv);
@@ -241,7 +240,7 @@ public class BulkEditChange
*
* @return the list of elements and their values that have been added.
*/
public List<Metadatum> getAdds()
public List<BulkEditMetadataValue> getAdds()
{
// Return the array
return adds;
@@ -252,7 +251,7 @@ public class BulkEditChange
*
* @return the list of elements and their values that have been removed.
*/
public List<Metadatum> getRemoves()
public List<BulkEditMetadataValue> getRemoves()
{
// Return the array
return removes;
@@ -263,7 +262,7 @@ public class BulkEditChange
*
* @return the list of unchanged values
*/
public List<Metadatum> getConstant()
public List<BulkEditMetadataValue> getConstant()
{
// Return the array
return constant;
@@ -274,7 +273,7 @@ public class BulkEditChange
*
* @return the list of all values
*/
public List<Metadatum> getComplete()
public List<BulkEditMetadataValue> getComplete()
{
// Return the array
return complete;
@@ -404,4 +403,4 @@ public class BulkEditChange
{
return !empty;
}
}
}

View File

@@ -0,0 +1,83 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
 * Value class used for metadata value edits used by the bulk edit.
 *
 * <p>
 * A simple mutable holder for one metadata value: its field coordinates
 * (schema, element, qualifier), language, the value itself, and its
 * authority key and confidence.
 * </p>
 *
 * @author kevinvandevelde at atmire.com
 */
public class BulkEditMetadataValue {

    // Field coordinates
    private String schema;
    private String element;
    private String qualifier;

    // Value payload
    private String language;
    private String value;

    // Authority control
    private String authority;
    private int confidence;

    /** Creates an empty value; all fields start unset. */
    public BulkEditMetadataValue() {
    }

    public String getSchema() {
        return schema;
    }

    public void setSchema(String schema) {
        this.schema = schema;
    }

    public String getElement() {
        return element;
    }

    public void setElement(String element) {
        this.element = element;
    }

    public String getQualifier() {
        return qualifier;
    }

    public void setQualifier(String qualifier) {
        this.qualifier = qualifier;
    }

    public String getLanguage() {
        return language;
    }

    public void setLanguage(String language) {
        this.language = language;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getAuthority() {
        return authority;
    }

    public void setAuthority(String authority) {
        this.authority = authority;
    }

    public int getConfidence() {
        return confidence;
    }

    public void setConfidence(int confidence) {
        this.confidence = confidence;
    }
}

View File

@@ -7,13 +7,17 @@
*/
package org.dspace.app.bulkedit;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authority.AuthorityValue;
import org.dspace.app.bulkedit.DSpaceCSVLine;
import org.dspace.app.bulkedit.MetadataImport;
import org.dspace.app.bulkedit.MetadataImportInvalidHeadingException;
import org.dspace.content.Collection;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.authority.Choices;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
@@ -39,38 +43,43 @@ import java.io.*;
public class DSpaceCSV implements Serializable
{
/** The headings of the CSV file */
private List<String> headings;
protected List<String> headings;
/** An array list of CSV lines */
private List<DSpaceCSVLine> lines;
protected List<DSpaceCSVLine> lines;
/** A counter of how many CSV lines this object holds */
private int counter;
protected int counter;
/** The value separator (defaults to double pipe '||') */
protected static String valueSeparator;
protected String valueSeparator;
/** The value separator in an escaped form for using in regexes */
protected static String escapedValueSeparator;
protected String escapedValueSeparator;
/** The field separator (defaults to comma) */
protected static String fieldSeparator;
protected String fieldSeparator;
/** The field separator in an escaped form for using in regexes */
protected static String escapedFieldSeparator;
protected String escapedFieldSeparator;
/** The authority separator (defaults to double colon '::') */
protected static String authoritySeparator;
protected String authoritySeparator;
/** The authority separator in an escaped form for using in regexes */
protected static String escapedAuthoritySeparator;
protected String escapedAuthoritySeparator;
protected transient final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected transient final MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected transient final MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
protected transient final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
/** Whether to export all metadata such as handles and provenance information */
private boolean exportAll;
protected boolean exportAll;
/** A list of metadata elements to ignore */
private Map<String, String> ignore;
protected Map<String, String> ignore;
/**
@@ -135,7 +144,7 @@ public class DSpaceCSV implements Serializable
else if (!"id".equals(element))
{
String authorityPrefix = "";
AuthorityValue authorityValueType = MetadataImport.getAuthorityValueType(element);
AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
if (authorityValueType != null) {
String authorityType = authorityValueType.getAuthorityType();
authorityPrefix = element.substring(0, authorityType.length() + 1);
@@ -160,7 +169,7 @@ public class DSpaceCSV implements Serializable
}
// Check that the scheme exists
MetadataSchema foundSchema = MetadataSchema.find(c, metadataSchema);
MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
if (foundSchema == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.SCHEMA,
@@ -168,8 +177,7 @@ public class DSpaceCSV implements Serializable
}
// Check that the metadata element exists in the schema
int schemaID = foundSchema.getSchemaID();
MetadataField foundField = MetadataField.findByElement(c, schemaID, metadataElement, metadataQualifier);
MetadataField foundField = metadataFieldService.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.ELEMENT,
@@ -185,7 +193,7 @@ public class DSpaceCSV implements Serializable
StringBuilder lineBuilder = new StringBuilder();
String lineRead;
while ((lineRead = input.readLine()) != null)
while (StringUtils.isNotBlank(lineRead = input.readLine()))
{
if (lineBuilder.length() > 0) {
// Already have a previously read value - add this line
@@ -238,7 +246,7 @@ public class DSpaceCSV implements Serializable
/**
* Initialise this class with values from dspace.cfg
*/
private void init()
protected void init()
{
// Set the value separator
setValueSeparator();
@@ -250,16 +258,16 @@ public class DSpaceCSV implements Serializable
setAuthoritySeparator();
// Create the headings
headings = new ArrayList<String>();
headings = new ArrayList<>();
// Create the blank list of items
lines = new ArrayList<DSpaceCSVLine>();
lines = new ArrayList<>();
// Initialise the counter
counter = 0;
// Set the metadata fields to ignore
ignore = new HashMap<String, String>();
ignore = new HashMap<>();
String toIgnore = ConfigurationManager.getProperty("bulkedit", "ignore-on-export");
if ((toIgnore == null) || ("".equals(toIgnore.trim())))
{
@@ -412,7 +420,7 @@ public class DSpaceCSV implements Serializable
line.add("collection", owningCollectionHandle);
// Add in any mapped collections
Collection[] collections = i.getCollections();
List<Collection> collections = i.getCollections();
for (Collection c : collections)
{
// Only add if it is not the owning collection
@@ -423,33 +431,35 @@ public class DSpaceCSV implements Serializable
}
// Populate it
Metadatum md[] = i.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (Metadatum value : md)
List<MetadataValue> md = itemService.getMetadata(i, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (MetadataValue value : md)
{
MetadataField metadataField = value.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
// Get the key (schema.element)
String key = value.schema + "." + value.element;
String key = metadataSchema.getName() + "." + metadataField.getElement();
// Add the qualifier if there is one (schema.element.qualifier)
if (value.qualifier != null)
if (metadataField.getQualifier() != null)
{
key = key + "." + value.qualifier;
key = key + "." + metadataField.getQualifier();
}
// Add the language if there is one (schema.element.qualifier[langauge])
//if ((value.language != null) && (!"".equals(value.language)))
if (value.language != null)
if (value.getLanguage() != null)
{
key = key + "[" + value.language + "]";
key = key + "[" + value.getLanguage() + "]";
}
// Store the item
if (exportAll || okToExport(value))
if (exportAll || okToExport(metadataField))
{
// Add authority and confidence if authority is not null
String mdValue = value.value;
if (value.authority != null && !"".equals(value.authority))
String mdValue = value.getValue();
if (value.getAuthority() != null && !"".equals(value.getAuthority()))
{
mdValue += authoritySeparator + value.authority + authoritySeparator + value.confidence;
mdValue += authoritySeparator + value.getAuthority() + authoritySeparator + (value.getConfidence() != -1 ? value.getConfidence() : Choices.CF_ACCEPTED);
}
line.add(key, mdValue);
if (!headings.contains(key))
@@ -481,7 +491,7 @@ public class DSpaceCSV implements Serializable
// Split up on field separator
String[] parts = line.split(escapedFieldSeparator);
ArrayList<String> bits = new ArrayList<String>();
ArrayList<String> bits = new ArrayList<>();
bits.addAll(Arrays.asList(parts));
// Merge parts with embedded separators
@@ -544,7 +554,7 @@ public class DSpaceCSV implements Serializable
{
try
{
csvLine = new DSpaceCSVLine(Integer.parseInt(id));
csvLine = new DSpaceCSVLine(UUID.fromString(id));
}
catch (NumberFormatException nfe)
{
@@ -610,8 +620,9 @@ public class DSpaceCSV implements Serializable
// Create the headings line
String[] csvLines = new String[counter + 1];
csvLines[0] = "id" + fieldSeparator + "collection";
Collections.sort(headings);
for (String value : headings)
List<String> headingsCopy = new ArrayList<>(headings);
Collections.sort(headingsCopy);
for (String value : headingsCopy)
{
csvLines[0] = csvLines[0] + fieldSeparator + value;
}
@@ -620,7 +631,7 @@ public class DSpaceCSV implements Serializable
int c = 1;
while (i.hasNext())
{
csvLines[c++] = i.next().toCSV(headings);
csvLines[c++] = i.next().toCSV(headingsCopy, fieldSeparator);
}
return csvLines;
@@ -655,13 +666,13 @@ public class DSpaceCSV implements Serializable
* @param md The Metadatum to examine
* @return Whether or not it is OK to export this element
*/
private final boolean okToExport(Metadatum md)
protected boolean okToExport(MetadataField md)
{
// Now compare with the list to ignore
String key = md.schema + "." + md.element;
if (md.qualifier != null)
String key = md.getMetadataSchema().getName() + "." + md.getElement();
if (md.getQualifier() != null)
{
key += "." + md.qualifier;
key += "." + md.getQualifier();
}
if (ignore.get(key) != null) {
return false;
@@ -686,10 +697,11 @@ public class DSpaceCSV implements Serializable
*
* @return The formatted String as a csv
*/
@Override
public final String toString()
{
// Return the csv as one long string
StringBuffer csvLines = new StringBuffer();
StringBuilder csvLines = new StringBuilder();
String[] lines = this.getCSVLinesAsStringArray();
for (String line : lines)
{
@@ -697,4 +709,12 @@ public class DSpaceCSV implements Serializable
}
return csvLines.toString();
}
public String getAuthoritySeparator() {
return authoritySeparator;
}
public String getEscapedAuthoritySeparator() {
return escapedAuthoritySeparator;
}
}

View File

@@ -7,12 +7,12 @@
*/
package org.dspace.app.bulkedit;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
/**
* Utility class to store a line from a CSV file
@@ -22,21 +22,47 @@ import java.util.Set;
public class DSpaceCSVLine implements Serializable
{
/** The item id of the item represented by this line. -1 is for a new item */
private int id;
private final UUID id;
/** The elements in this line in a hashtable, keyed by the metadata type */
private Map<String, ArrayList> items;
private final Map<String, ArrayList> items;
protected transient final AuthorityValueService authorityValueService
= AuthorityServiceFactory.getInstance().getAuthorityValueService();
/** ensuring that the order-sensible columns of the csv are processed in the correct order */
private transient final Comparator<? super String> headerComparator = new Comparator<String>() {
@Override
public int compare(String md1, String md2) {
// The metadata coming from an external source should be processed after the others
AuthorityValue source1 = authorityValueService.getAuthorityValueType(md1);
AuthorityValue source2 = authorityValueService.getAuthorityValueType(md2);
int compare;
if (source1 == null && source2 != null) {
compare = -1;
}
else if (source1 != null && source2 == null) {
compare = 1;
} else {
// the order of the rest does not matter
compare = md1.compareTo(md2);
}
return compare;
}
};
/**
* Create a new CSV line
*
* @param itemId The item ID of the line
*/
public DSpaceCSVLine(int itemId)
public DSpaceCSVLine(UUID itemId)
{
// Store the ID + separator, and initialise the hashtable
this.id = itemId;
items = new HashMap<String, ArrayList>();
items = new TreeMap<>(headerComparator);
// this.items = new HashMap<String, ArrayList>();
}
/**
@@ -44,9 +70,9 @@ public class DSpaceCSVLine implements Serializable
*/
public DSpaceCSVLine()
{
// Set the ID to be -1, and initialise the hashtable
this.id = -1;
this.items = new HashMap<String, ArrayList>();
// Set the ID to be null, and initialise the hashtable
this.id = null;
this.items = new TreeMap<>(headerComparator);
}
/**
@@ -54,7 +80,7 @@ public class DSpaceCSVLine implements Serializable
*
* @return The item ID
*/
public int getID()
public UUID getID()
{
// Return the ID
return id;
@@ -124,24 +150,25 @@ public class DSpaceCSVLine implements Serializable
* Write this line out as a CSV formatted string, in the order given by the headings provided
*
* @param headings The headings which define the order the elements must be presented in
* @param fieldSeparator
* @return The CSV formatted String
*/
protected String toCSV(List<String> headings)
protected String toCSV(List<String> headings, String fieldSeparator)
{
StringBuilder bits = new StringBuilder();
// Add the id
bits.append("\"").append(id).append("\"").append(DSpaceCSV.fieldSeparator);
bits.append(valueToCSV(items.get("collection")));
bits.append("\"").append(id).append("\"").append(fieldSeparator);
bits.append(valueToCSV(items.get("collection"), fieldSeparator));
// Add the rest of the elements
for (String heading : headings)
{
bits.append(DSpaceCSV.fieldSeparator);
bits.append(fieldSeparator);
List<String> values = items.get(heading);
if (values != null && !"collection".equals(heading))
{
bits.append(valueToCSV(values));
bits.append(valueToCSV(values, fieldSeparator));
}
}
@@ -152,9 +179,10 @@ public class DSpaceCSVLine implements Serializable
* Internal method to create a CSV formatted String joining a given set of elements
*
* @param values The values to create the string from
* @param valueSeparator
* @return The line as a CSV formatted String
*/
protected String valueToCSV(List<String> values)
protected String valueToCSV(List<String> values, String valueSeparator)
{
// Check there is some content
if (values == null)
@@ -177,7 +205,7 @@ public class DSpaceCSVLine implements Serializable
{
if (str.length() > 0)
{
str.append(DSpaceCSV.valueSeparator);
str.append(valueSeparator);
}
str.append(value);

View File

@@ -7,15 +7,19 @@
*/
package org.dspace.app.bulkedit;
import com.google.common.collect.Iterators;
import org.apache.commons.cli.*;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.handle.factory.HandleServiceFactory;
import java.util.ArrayList;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -26,10 +30,16 @@ import java.util.List;
public class MetadataExport
{
/** The items to export */
private ItemIterator toExport;
protected Iterator<Item> toExport;
protected ItemService itemService;
/** Whether to export all metadata, or just normally edited metadata */
private boolean exportAll;
protected boolean exportAll;
protected MetadataExport() {
itemService = ContentServiceFactory.getInstance().getItemService();
}
/**
* Set up a new metadata export
@@ -38,7 +48,7 @@ public class MetadataExport
* @param toExport The ItemIterator of items to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, ItemIterator toExport, boolean exportAll)
public MetadataExport(Context c, Iterator<Item> toExport, boolean exportAll)
{
// Store the export settings
this.toExport = toExport;
@@ -57,7 +67,7 @@ public class MetadataExport
try
{
// Try to export the community
this.toExport = new ItemIterator(c, buildFromCommunity(toExport, new ArrayList<Integer>(), 0));
this.toExport = buildFromCommunity(c, toExport, new ArrayList<Integer>(), 0);
this.exportAll = exportAll;
}
catch (SQLException sqle)
@@ -78,11 +88,12 @@ public class MetadataExport
* @return The list of item ids
* @throws SQLException
*/
private List<Integer> buildFromCommunity(Community community, List<Integer> itemIDs, int indent)
protected Iterator<Item> buildFromCommunity(Context context, Community community, List<Integer> itemIDs, int indent)
throws SQLException
{
// Add all the collections
Collection[] collections = community.getCollections();
List<Collection> collections = community.getCollections();
Iterator<Item> result = null;
for (Collection collection : collections)
{
for (int i = 0; i < indent; i++)
@@ -90,30 +101,27 @@ public class MetadataExport
System.out.print(" ");
}
ItemIterator items = collection.getAllItems();
while (items.hasNext())
Iterator<Item> items = itemService.findByCollection(context, collection);
if(result == null)
{
int id = items.next().getID();
// Only add if not already included (so mapped items only appear once)
if (!itemIDs.contains(id))
{
itemIDs.add(id);
}
result = items;
}else{
result = Iterators.concat(result, items);
}
}
}
// Add all the sub-communities
Community[] communities = community.getSubcommunities();
List<Community> communities = community.getSubcommunities();
for (Community subCommunity : communities)
{
for (int i = 0; i < indent; i++)
{
System.out.print(" ");
}
buildFromCommunity(subCommunity, itemIDs, indent + 1);
buildFromCommunity(context, subCommunity, itemIDs, indent + 1);
}
return itemIDs;
return result;
}
/**
@@ -208,22 +216,24 @@ public class MetadataExport
c.turnOffAuthorisationSystem();
// The things we'll export
ItemIterator toExport = null;
Iterator<Item> toExport = null;
MetadataExport exporter = null;
// Export everything?
boolean exportAll = line.hasOption('a');
ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
// Check we have an item OK
ItemService itemService = contentServiceFactory.getItemService();
if (!line.hasOption('i'))
{
System.out.println("Exporting whole repository WARNING: May take some time!");
exporter = new MetadataExport(c, Item.findAll(c), exportAll);
exporter = new MetadataExport(c, itemService.findAll(c), exportAll);
}
else
{
String handle = line.getOptionValue('i');
DSpaceObject dso = HandleManager.resolveToObject(c, handle);
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, handle);
if (dso == null)
{
System.err.println("Item '" + handle + "' does not resolve to an item in your repository!");
@@ -233,15 +243,15 @@ public class MetadataExport
if (dso.getType() == Constants.ITEM)
{
System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")");
List<Integer> item = new ArrayList<Integer>();
item.add(dso.getID());
exporter = new MetadataExport(c, new ItemIterator(c, item), exportAll);
List<Item> item = new ArrayList<>();
item.add((Item) dso);
exporter = new MetadataExport(c, item.iterator(), exportAll);
}
else if (dso.getType() == Constants.COLLECTION)
{
System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")");
Collection collection = (Collection)dso;
toExport = collection.getAllItems();
toExport = itemService.findByCollection(c, collection);
exporter = new MetadataExport(c, toExport, exportAll);
}
else if (dso.getType() == Constants.COMMUNITY)

View File

@@ -8,34 +8,38 @@
package org.dspace.app.bulkedit;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueFinder;
import org.dspace.authority.AuthorityValueGenerator;
import org.apache.commons.cli.*;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Constants;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.eperson.EPerson;
import org.dspace.workflow.WorkflowManager;
import org.dspace.xmlworkflow.XmlWorkflowManager;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import java.util.ArrayList;
import java.util.*;
import java.io.File;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Metadata importer to allow the batch import of metadata from a file
@@ -54,19 +58,25 @@ public class MetadataImport
List<DSpaceCSVLine> toImport;
/** The authority controlled fields */
private static Set<String> authorityControlled;
protected static Set<String> authorityControlled;
static
{
setAuthorizedMetadataFields();
}
/** The prefix of the authority controlled field */
private static final String AC_PREFIX = "authority.controlled.";
protected static final String AC_PREFIX = "authority.controlled.";
/** Logger */
private static final Logger log = Logger.getLogger(MetadataImport.class);
protected static final Logger log = Logger.getLogger(MetadataImport.class);
private AuthorityValueFinder authorityValueFinder = new AuthorityValueFinder();
protected final AuthorityValueService authorityValueService;
protected final ItemService itemService;
protected final InstallItemService installItemService;
protected final CollectionService collectionService;
protected final HandleService handleService;
protected final WorkspaceItemService workspaceItemService;
/**
* Create an instance of the metadata importer. Requires a context and an array of CSV lines
@@ -81,6 +91,12 @@ public class MetadataImport
this.c = c;
csv = toImport;
this.toImport = toImport.getCSVLines();
installItemService = ContentServiceFactory.getInstance().getInstallItemService();
itemService = ContentServiceFactory.getInstance().getItemService();
collectionService = ContentServiceFactory.getInstance().getCollectionService();
handleService = HandleServiceFactory.getInstance().getHandleService();
authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
}
/**
@@ -110,19 +126,19 @@ public class MetadataImport
for (DSpaceCSVLine line : toImport)
{
// Get the DSpace item to compare with
int id = line.getID();
UUID id = line.getID();
// Is there an action column?
if (csv.hasActions() && (!"".equals(line.getAction())) && (id == -1))
if (csv.hasActions() && (!"".equals(line.getAction())) && (id == null))
{
throw new MetadataImportException("'action' not allowed for new items!");
}
// Is this a new item?
if (id != -1)
if (id != null)
{
// Get the item
Item item = Item.find(c, id);
Item item = itemService.find(c, id);
if (item == null)
{
throw new MetadataImportException("Unknown item ID " + id);
@@ -140,7 +156,7 @@ public class MetadataImport
{
throw new MetadataImportException("Missing collection from item " + item.getHandle());
}
Collection[] actualCollections = item.getCollections();
List<Collection> actualCollections = item.getCollections();
compare(item, collections, actualCollections, whatHasChanged, change);
}
@@ -157,7 +173,7 @@ public class MetadataImport
{
for (int i=0; i<fromCSV.length; i++)
{
int pos = fromCSV[i].indexOf(DSpaceCSV.authoritySeparator);
int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator());
if (pos > -1)
{
fromCSV[i] = fromCSV[i].substring(0, pos);
@@ -166,7 +182,7 @@ public class MetadataImport
}
// Compare
compare(item, fromCSV, change, md, whatHasChanged);
compare(item, fromCSV, change, md, whatHasChanged, line);
}
}
@@ -187,12 +203,12 @@ public class MetadataImport
}
// Remove the item
Collection[] owners = item.getCollections();
List<Collection> owners = item.getCollections();
for (Collection owner : owners)
{
if (change)
{
owner.removeItem(item);
collectionService.removeItem(c, owner, item);
}
}
whatHasChanged.setDeleted();
@@ -204,7 +220,7 @@ public class MetadataImport
{
if (change)
{
item.withdraw();
itemService.withdraw(c, item);
}
whatHasChanged.setWithdrawn();
}
@@ -216,7 +232,7 @@ public class MetadataImport
{
if (change)
{
item.reinstate();
itemService.reinstate(c, item);
}
whatHasChanged.setReinstated();
}
@@ -258,7 +274,7 @@ public class MetadataImport
{
for (int i=0; i<fromCSV.length; i++)
{
int pos = fromCSV[i].indexOf(DSpaceCSV.authoritySeparator);
int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator());
if (pos > -1)
{
fromCSV[i] = fromCSV[i].substring(0, pos);
@@ -286,7 +302,7 @@ public class MetadataImport
try
{
// Resolve the handle to the collection
collection = (Collection)HandleManager.resolveToObject(c, handle);
collection = (Collection) handleService.resolveToObject(c, handle);
// Check it resolved OK
if (collection == null)
@@ -314,7 +330,7 @@ public class MetadataImport
boolean first = true;
for (String handle : collections)
{
Collection extra = (Collection)HandleManager.resolveToObject(c, handle);
Collection extra = (Collection) handleService.resolveToObject(c, handle);
if (first)
{
whatHasChanged.setOwningCollection(extra);
@@ -331,42 +347,35 @@ public class MetadataImport
{
// Create the item
String collectionHandle = line.get("collection").get(0);
collection = (Collection)HandleManager.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = WorkspaceItem.create(c, collection, useTemplate);
collection = (Collection) handleService.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = workspaceItemService.create(c, collection, useTemplate);
Item item = wsItem.getItem();
// Add the metadata to the item
for (Metadatum dcv : whatHasChanged.getAdds())
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds())
{
item.addMetadata(dcv.schema,
dcv.element,
dcv.qualifier,
dcv.language,
dcv.value,
dcv.authority,
dcv.confidence);
itemService.addMetadata(c, item, dcv.getSchema(),
dcv.getElement(),
dcv.getQualifier(),
dcv.getLanguage(),
dcv.getValue(),
dcv.getAuthority(),
dcv.getConfidence());
}
// Should the workflow be used?
if(useWorkflow){
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow")) {
if (workflowNotify) {
XmlWorkflowManager.start(c, wsItem);
} else {
XmlWorkflowManager.startWithoutNotify(c, wsItem);
}
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
if (workflowNotify) {
workflowService.start(c, wsItem);
} else {
if (workflowNotify) {
WorkflowManager.start(c, wsItem);
} else {
WorkflowManager.startWithoutNotify(c, wsItem);
}
workflowService.startWithoutNotify(c, wsItem);
}
}
else
{
// Install the item
InstallItem.installItem(c, wsItem);
installItemService.installItem(c, wsItem);
}
// Add to extra collections
@@ -375,13 +384,13 @@ public class MetadataImport
for (int i = 1; i < collections.size(); i++)
{
String handle = collections.get(i);
Collection extra = (Collection)HandleManager.resolveToObject(c, handle);
extra.addItem(item);
Collection extra = (Collection) handleService.resolveToObject(c, handle);
collectionService.addItem(c, extra, item);
}
}
// Commit changes to the object
c.commit();
// c.commit();
whatHasChanged.setItem(item);
}
@@ -412,11 +421,12 @@ public class MetadataImport
* @param md The element to compare
* @param changes The changes object to populate
*
* @param line
* @throws SQLException if there is a problem accessing a Collection from the database, from its handle
* @throws AuthorizeException if there is an authorization problem with permissions
*/
private void compare(Item item, String[] fromCSV, boolean change,
String md, BulkEditChange changes) throws SQLException, AuthorizeException
protected void compare(Item item, String[] fromCSV, boolean change,
String md, BulkEditChange changes, DSpaceCSVLine line) throws SQLException, AuthorizeException
{
// Log what metadata element we're looking at
String all = "";
@@ -443,7 +453,7 @@ public class MetadataImport
language = bits[1].substring(0, bits[1].length() - 1);
}
AuthorityValue fromAuthority = getAuthorityValueType(md);
AuthorityValue fromAuthority = authorityValueService.getAuthorityValueType(md);
if (md.indexOf(':') > 0) {
md = md.substring(md.indexOf(':') + 1);
}
@@ -473,33 +483,34 @@ public class MetadataImport
",looking_for_element=" + element +
",looking_for_qualifier=" + qualifier +
",looking_for_language=" + language));
Metadatum[] current = item.getMetadata(schema, element, qualifier, language);
String[] dcvalues = new String[current.length];
int i = 0;
for (Metadatum dcv : current)
{
if (dcv.authority == null || !isAuthorityControlledField(md))
{
dcvalues[i] = dcv.value;
String[] dcvalues;
if(fromAuthority==null) {
List<MetadataValue> current = itemService.getMetadata(item, schema, element, qualifier, language);
dcvalues = new String[current.size()];
int i = 0;
for (MetadataValue dcv : current) {
if (dcv.getAuthority() == null || !isAuthorityControlledField(md)) {
dcvalues[i] = dcv.getValue();
} else {
dcvalues[i] = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority();
dcvalues[i] += csv.getAuthoritySeparator() + (dcv.getConfidence() != -1 ? dcv.getConfidence() : Choices.CF_ACCEPTED);
}
i++;
log.debug(LogManager.getHeader(c, "metadata_import",
"item_id=" + item.getID() + ",fromCSV=" + all +
",found=" + dcv.getValue()));
}
else
{
dcvalues[i] = dcv.value + DSpaceCSV.authoritySeparator + dcv.authority;
dcvalues[i] += DSpaceCSV.authoritySeparator + (dcv.confidence != -1 ? dcv.confidence : Choices.CF_ACCEPTED);
}
i++;
log.debug(LogManager.getHeader(c, "metadata_import",
"item_id=" + item.getID() + ",fromCSV=" + all +
",found=" + dcv.value));
}else{
dcvalues = line.get(md).toArray(new String[line.get(md).size()]);
}
// Compare from current->csv
for (int v = 0; v < fromCSV.length; v++) {
String value = fromCSV[v];
Metadatum dcv = getDcValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
BulkEditMetadataValue dcv = getBulkEditValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
if (fromAuthority!=null) {
value = dcv.value + DSpaceCSV.authoritySeparator + dcv.authority + DSpaceCSV.authoritySeparator + dcv.confidence;
value = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority() + csv.getAuthoritySeparator() + dcv.getConfidence();
fromCSV[v] = value;
}
@@ -515,22 +526,24 @@ public class MetadataImport
for (String value : dcvalues)
{
// Look to see if it should be removed
Metadatum dcv = new Metadatum();
dcv.schema = schema;
dcv.element = element;
dcv.qualifier = qualifier;
dcv.language = language;
if (value == null || value.indexOf(DSpaceCSV.authoritySeparator) < 0)
BulkEditMetadataValue dcv = new BulkEditMetadataValue();
dcv.setSchema(schema);
dcv.setElement(element);
dcv.setQualifier(qualifier);
dcv.setLanguage(language);
if (value == null || !value.contains(csv.getAuthoritySeparator()))
simplyCopyValue(value, dcv);
else
{
String[] parts = value.split(DSpaceCSV.escapedAuthoritySeparator);
dcv.value = parts[0];
dcv.authority = parts[1];
dcv.confidence = (parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED);
String[] parts = value.split(csv.getAuthoritySeparator());
dcv.setValue(parts[0]);
dcv.setAuthority(parts[1]);
dcv.setConfidence((parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED));
}
if ((value != null) && (!"".equals(value)) && (!contains(value, fromCSV)))
if ((value != null) && (!"".equals(value)) && (!contains(value, fromCSV)) && fromAuthority==null)
// fromAuthority==null: with the current implementation metadata values from external authority sources can only be used to add metadata, not to change or remove them
// because e.g. an author that is not in the column "ORCID:dc.contributor.author" could still be in the column "dc.contributor.author" so don't remove it
{
// Remove it
log.debug(LogManager.getHeader(c, "metadata_import",
@@ -548,73 +561,66 @@ public class MetadataImport
((changes.getAdds().size() > 0) || (changes.getRemoves().size() > 0)))
{
// Get the complete list of what values should now be in that element
List<Metadatum> list = changes.getComplete();
List<BulkEditMetadataValue> list = changes.getComplete();
List<String> values = new ArrayList<String>();
List<String> authorities = new ArrayList<String>();
List<Integer> confidences = new ArrayList<Integer>();
for (Metadatum value : list)
for (BulkEditMetadataValue value : list)
{
if ((qualifier == null) && (language == null))
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(value.qualifier == null) &&
(value.language == null))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(value.getQualifier() == null) &&
(value.getLanguage() == null))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
else if (qualifier == null)
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(language.equals(value.language)) &&
(value.qualifier == null))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(language.equals(value.getLanguage())) &&
(value.getQualifier() == null))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
else if (language == null)
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(qualifier.equals(value.qualifier)) &&
(value.language == null))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(qualifier.equals(value.getQualifier())) &&
(value.getLanguage() == null))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
else
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(qualifier.equals(value.qualifier)) &&
(language.equals(value.language)))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(qualifier.equals(value.getQualifier())) &&
(language.equals(value.getLanguage())))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
}
// Set those values
item.clearMetadata(schema, element, qualifier, language);
String[] theValues = values.toArray(new String[values.size()]);
String[] theAuthorities = authorities.toArray(new String[authorities.size()]);
int[] theConfidences = new int[confidences.size()];
for (int k=0; k< confidences.size(); k++)
{
theConfidences[k] = confidences.get(k).intValue();
}
item.addMetadata(schema, element, qualifier, language, theValues, theAuthorities, theConfidences);
item.update();
itemService.clearMetadata(c, item, schema, element, qualifier, language);
itemService.addMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
itemService.update(c, item);
}
}
@@ -633,9 +639,9 @@ public class MetadataImport
* @throws IOException Can be thrown when moving items in communities
* @throws MetadataImportException If something goes wrong to be reported back to the user
*/
private void compare(Item item,
protected void compare(Item item,
List<String> collections,
Collection[] actualCollections,
List<Collection> actualCollections,
BulkEditChange bechange,
boolean change) throws SQLException, AuthorizeException, IOException, MetadataImportException
{
@@ -643,7 +649,7 @@ public class MetadataImport
String oldOwner = item.getOwningCollection().getHandle();
String newOwner = collections.get(0);
// Resolve the handle to the collection
Collection newCollection = (Collection)HandleManager.resolveToObject(c, newOwner);
Collection newCollection = (Collection) handleService.resolveToObject(c, newOwner);
// Check it resolved OK
if (newCollection == null)
@@ -654,7 +660,7 @@ public class MetadataImport
if (!oldOwner.equals(newOwner))
{
// Register the old and new owning collections
bechange.changeOwningCollection(item.getOwningCollection(), (Collection)HandleManager.resolveToObject(c, newOwner));
bechange.changeOwningCollection(item.getOwningCollection(), (Collection) handleService.resolveToObject(c, newOwner));
}
// Second, loop through the strings from the CSV of mapped collections
@@ -678,7 +684,7 @@ public class MetadataImport
}
// Was it found?
DSpaceObject dso = HandleManager.resolveToObject(c, csvcollection);
DSpaceObject dso = handleService.resolveToObject(c, csvcollection);
if ((dso == null) || (dso.getType() != Constants.COLLECTION))
{
throw new MetadataImportException("Collection defined for item " + item.getID() +
@@ -703,7 +709,7 @@ public class MetadataImport
for (String csvcollection : collections)
{
// Don't check the owning collection
if ((first) && (collection.getID() == item.getOwningCollection().getID()))
if ((first) && (collection.getID().equals(item.getOwningCollection().getID())))
{
found = true;
}
@@ -730,17 +736,17 @@ public class MetadataImport
if (change)
{
// Remove old mapped collections
for (Collection c : bechange.getOldMappedCollections())
for (Collection collection : bechange.getOldMappedCollections())
{
c.removeItem(item);
collectionService.removeItem(c, collection, item);
}
// Add to new owned collection
if (bechange.getNewOwningCollection() != null)
{
bechange.getNewOwningCollection().addItem(item);
collectionService.addItem(c, bechange.getNewOwningCollection(), item);
item.setOwningCollection(bechange.getNewOwningCollection());
item.update();
itemService.update(c, item);
}
// Remove from old owned collection (if still a member)
@@ -749,7 +755,7 @@ public class MetadataImport
boolean found = false;
for (Collection c : item.getCollections())
{
if (c.getID() == bechange.getOldOwningCollection().getID())
if (c.getID().equals(bechange.getOldOwningCollection().getID()))
{
found = true;
}
@@ -757,14 +763,14 @@ public class MetadataImport
if (found)
{
bechange.getOldOwningCollection().removeItem(item);
collectionService.removeItem(c, bechange.getOldOwningCollection(), item);
}
}
// Add to new mapped collections
for (Collection c : bechange.getNewMappedCollections())
for (Collection collection : bechange.getNewMappedCollections())
{
c.addItem(item);
collectionService.addItem(c, collection, item);
}
}
@@ -780,7 +786,7 @@ public class MetadataImport
* @throws SQLException when an SQL error has occurred (querying DSpace)
* @throws AuthorizeException If the user can't make the changes
*/
private void add(String[] fromCSV, String md, BulkEditChange changes)
protected void add(String[] fromCSV, String md, BulkEditChange changes)
throws SQLException, AuthorizeException
{
// Don't add owning collection or action
@@ -797,7 +803,7 @@ public class MetadataImport
String[] bits = md.split("\\[");
language = bits[1].substring(0, bits[1].length() - 1);
}
AuthorityValue fromAuthority = getAuthorityValueType(md);
AuthorityValue fromAuthority = authorityValueService.getAuthorityValueType(md);
if (md.indexOf(':') > 0) {
md = md.substring(md.indexOf(':')+1);
}
@@ -825,9 +831,9 @@ public class MetadataImport
// Add all the values
for (String value : fromCSV)
{
Metadatum dcv = getDcValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
BulkEditMetadataValue dcv = getBulkEditValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
if(fromAuthority!=null){
value = dcv.value + DSpaceCSV.authoritySeparator + dcv.authority + DSpaceCSV.authoritySeparator + dcv.confidence;
value = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority() + csv.getAuthoritySeparator() + dcv.getConfidence();
}
// Add it
@@ -838,59 +844,48 @@ public class MetadataImport
}
}
public static AuthorityValue getAuthorityValueType(String md) {
AuthorityValue fromAuthority = null;
List<AuthorityValue> types = AuthorityValue.getAuthorityTypes().getTypes();
for (AuthorityValue type : types) {
if (StringUtils.startsWithIgnoreCase(md,type.getAuthorityType())) {
fromAuthority = type;
}
}
return fromAuthority;
}
private Metadatum getDcValueFromCSV(String language, String schema, String element, String qualifier, String value, AuthorityValue fromAuthority) {
protected BulkEditMetadataValue getBulkEditValueFromCSV(String language, String schema, String element, String qualifier, String value, AuthorityValue fromAuthority) {
// Look to see if it should be removed
Metadatum dcv = new Metadatum();
dcv.schema = schema;
dcv.element = element;
dcv.qualifier = qualifier;
dcv.language = language;
BulkEditMetadataValue dcv = new BulkEditMetadataValue();
dcv.setSchema(schema);
dcv.setElement(element);
dcv.setQualifier(qualifier);
dcv.setLanguage(language);
if (fromAuthority != null) {
if (value.indexOf(':') > 0) {
value = value.substring(0, value.indexOf(':'));
}
// look up the value and authority in solr
List<AuthorityValue> byValue = authorityValueFinder.findByValue(c, schema, element, qualifier, value);
List<AuthorityValue> byValue = authorityValueService.findByValue(c, schema, element, qualifier, value);
AuthorityValue authorityValue = null;
if (byValue.isEmpty()) {
String toGenerate = fromAuthority.generateString() + value;
String field = schema + "_" + element + (StringUtils.isNotBlank(qualifier) ? "_" + qualifier : "");
authorityValue = AuthorityValueGenerator.generate(toGenerate, value, field);
dcv.authority = toGenerate;
authorityValue = authorityValueService.generate(c, toGenerate, value, field);
dcv.setAuthority(toGenerate);
} else {
authorityValue = byValue.get(0);
dcv.authority = authorityValue.getId();
dcv.setAuthority(authorityValue.getId());
}
dcv.value = authorityValue.getValue();
dcv.confidence = Choices.CF_ACCEPTED;
} else if (value == null || !value.contains(DSpaceCSV.authoritySeparator)) {
dcv.setValue(authorityValue.getValue());
dcv.setConfidence(Choices.CF_ACCEPTED);
} else if (value == null || !value.contains(csv.getAuthoritySeparator())) {
simplyCopyValue(value, dcv);
} else {
String[] parts = value.split(DSpaceCSV.escapedAuthoritySeparator);
dcv.value = parts[0];
dcv.authority = parts[1];
dcv.confidence = (parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED);
String[] parts = value.split(csv.getEscapedAuthoritySeparator());
dcv.setValue(parts[0]);
dcv.setAuthority(parts[1]);
dcv.setConfidence((parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED));
}
return dcv;
}
private void simplyCopyValue(String value, Metadatum dcv) {
dcv.value = value;
dcv.authority = null;
dcv.confidence = Choices.CF_UNSET;
protected void simplyCopyValue(String value, BulkEditMetadataValue dcv) {
dcv.setValue(value);
dcv.setAuthority(null);
dcv.setConfidence(Choices.CF_UNSET);
}
/**
@@ -900,7 +895,7 @@ public class MetadataImport
* @param haystack The array of Strings to search through
* @return Whether or not it is contained
*/
private boolean contains(String needle, String[] haystack)
protected boolean contains(String needle, String[] haystack)
{
// Look for the needle in the haystack
for (String examine : haystack)
@@ -919,7 +914,7 @@ public class MetadataImport
* @param in The element to clean
* @return The cleaned up element
*/
private String clean(String in)
protected String clean(String in)
{
// Check for nulls
if (in == null)
@@ -960,8 +955,8 @@ public class MetadataImport
for (BulkEditChange change : changes)
{
// Get the changes
List<Metadatum> adds = change.getAdds();
List<Metadatum> removes = change.getRemoves();
List<BulkEditMetadataValue> adds = change.getAdds();
List<BulkEditMetadataValue> removes = change.getRemoves();
List<Collection> newCollections = change.getNewMappedCollections();
List<Collection> oldCollections = change.getOldMappedCollections();
if ((adds.size() > 0) || (removes.size() > 0) ||
@@ -1099,16 +1094,16 @@ public class MetadataImport
}
// Show additions
for (Metadatum dcv : adds)
for (BulkEditMetadataValue metadataValue : adds)
{
String md = dcv.schema + "." + dcv.element;
if (dcv.qualifier != null)
String md = metadataValue.getSchema() + "." + metadataValue.getElement();
if (metadataValue.getQualifier() != null)
{
md += "." + dcv.qualifier;
md += "." + metadataValue.getQualifier();
}
if (dcv.language != null)
if (metadataValue.getLanguage() != null)
{
md += "[" + dcv.language + "]";
md += "[" + metadataValue.getLanguage() + "]";
}
if (!changed)
{
@@ -1118,26 +1113,26 @@ public class MetadataImport
{
System.out.print(" + Added (" + md + "): ");
}
System.out.print(dcv.value);
System.out.print(metadataValue.getValue());
if (isAuthorityControlledField(md))
{
System.out.print(", authority = " + dcv.authority);
System.out.print(", confidence = " + dcv.confidence);
System.out.print(", authority = " + metadataValue.getAuthority());
System.out.print(", confidence = " + metadataValue.getConfidence());
}
System.out.println("");
}
// Show removals
for (Metadatum dcv : removes)
for (BulkEditMetadataValue metadataValue : removes)
{
String md = dcv.schema + "." + dcv.element;
if (dcv.qualifier != null)
String md = metadataValue.getSchema() + "." + metadataValue.getElement();
if (metadataValue.getQualifier() != null)
{
md += "." + dcv.qualifier;
md += "." + metadataValue.getQualifier();
}
if (dcv.language != null)
if (metadataValue.getLanguage() != null)
{
md += "[" + dcv.language + "]";
md += "[" + metadataValue.getLanguage() + "]";
}
if (!changed)
{
@@ -1147,11 +1142,11 @@ public class MetadataImport
{
System.out.print(" - Removed (" + md + "): ");
}
System.out.print(dcv.value);
System.out.print(metadataValue.getValue());
if (isAuthorityControlledField(md))
{
System.out.print(", authority = " + dcv.authority);
System.out.print(", confidence = " + dcv.confidence);
System.out.print(", authority = " + metadataValue.getAuthority());
System.out.print(", confidence = " + metadataValue.getConfidence());
}
System.out.println("");
}
@@ -1165,8 +1160,8 @@ public class MetadataImport
*/
private static boolean isAuthorityControlledField(String md)
{
int pos = md.indexOf("[");
String mdf = (pos > -1 ? md.substring(0, pos) : md);
String mdf = StringUtils.substringAfter(md, ":");
mdf = StringUtils.substringBefore(mdf, "[");
return authorityControlled.contains(mdf);
}
@@ -1282,11 +1277,10 @@ public class MetadataImport
String e = line.getOptionValue('e');
if (e.indexOf('@') != -1)
{
eperson = EPerson.findByEmail(c, e);
}
else
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(c, e);
} else
{
eperson = EPerson.find(c, Integer.parseInt(e));
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(c, UUID.fromString(e));
}
if (eperson == null)
@@ -1402,7 +1396,7 @@ public class MetadataImport
displayChanges(changes, true);
// Commit the change to the DB
c.commit();
// c.commit();
}
// Finsh off and tidy up

View File

@@ -85,6 +85,7 @@ public class MetadataImportInvalidHeadingException extends Exception
*
* @return The exception message
*/
@Override
public String getMessage()
{
if (type == SCHEMA)

View File

@@ -8,10 +8,8 @@
package org.dspace.app.checker;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.sql.SQLException;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -22,16 +20,11 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.BitstreamInfoDAO;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
import org.dspace.checker.LimitedCountDispatcher;
import org.dspace.checker.LimitedDurationDispatcher;
import org.dspace.checker.ListDispatcher;
import org.dspace.checker.ResultsLogger;
import org.dspace.checker.ResultsPruner;
import org.dspace.checker.SimpleDispatcher;
import org.dspace.checker.*;
import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Context;
import org.dspace.core.Utils;
/**
@@ -46,6 +39,8 @@ public final class ChecksumChecker
{
private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
/**
* Blanked off constructor, this class should be used as a command line
* tool.
@@ -78,8 +73,7 @@ public final class ChecksumChecker
* <dd>Don't prune results before running checker</dd>
* </dl>
*/
public static void main(String[] args)
{
public static void main(String[] args) throws SQLException {
// set up command line parser
CommandLineParser parser = new PosixParser();
CommandLine line = null;
@@ -126,112 +120,122 @@ public final class ChecksumChecker
{
printHelp(options);
}
Context context = null;
try {
context = new Context();
// Prune stage
if (line.hasOption('p'))
{
ResultsPruner rp = null;
try
{
rp = (line.getOptionValue('p') != null) ? ResultsPruner
.getPruner(line.getOptionValue('p')) : ResultsPruner
.getDefaultPruner();
}
catch (FileNotFoundException e)
{
LOG.error("File not found", e);
System.exit(1);
}
int count = rp.prune();
System.out.println("Pruned " + count
+ " old results from the database.");
}
Date processStart = Calendar.getInstance().getTime();
BitstreamDispatcher dispatcher = null;
// process should loop infinitely through
// most_recent_checksum table
if (line.hasOption('l'))
{
dispatcher = new SimpleDispatcher(new BitstreamInfoDAO(), processStart, false);
}
else if (line.hasOption('L'))
{
dispatcher = new SimpleDispatcher(new BitstreamInfoDAO(), processStart, true);
}
else if (line.hasOption('b'))
{
// check only specified bitstream(s)
String[] ids = line.getOptionValues('b');
List<Integer> idList = new ArrayList<Integer>(ids.length);
for (int i = 0; i < ids.length; i++)
// Prune stage
if (line.hasOption('p'))
{
ResultsPruner rp = null;
try
{
idList.add(Integer.valueOf(ids[i]));
rp = (line.getOptionValue('p') != null) ? ResultsPruner
.getPruner(context, line.getOptionValue('p')) : ResultsPruner
.getDefaultPruner(context);
}
catch (NumberFormatException nfe)
catch (FileNotFoundException e)
{
System.err.println("The following argument: " + ids[i]
+ " is not an integer");
LOG.error("File not found", e);
System.exit(1);
}
int count = rp.prune();
System.out.println("Pruned " + count
+ " old results from the database.");
}
Date processStart = Calendar.getInstance().getTime();
BitstreamDispatcher dispatcher = null;
// process should loop infinitely through
// most_recent_checksum table
if (line.hasOption('l'))
{
dispatcher = new SimpleDispatcher(context, processStart, false);
}
else if (line.hasOption('L'))
{
dispatcher = new SimpleDispatcher(context, processStart, true);
}
else if (line.hasOption('b'))
{
// check only specified bitstream(s)
String[] ids = line.getOptionValues('b');
List<Bitstream> bitstreams = new ArrayList<>(ids.length);
for (int i = 0; i < ids.length; i++)
{
try
{
bitstreams.add(bitstreamService.find(context, UUID.fromString(ids[i])));
}
catch (NumberFormatException nfe)
{
System.err.println("The following argument: " + ids[i]
+ " is not an integer");
System.exit(0);
}
}
dispatcher = new IteratorDispatcher(bitstreams.iterator());
}
else if (line.hasOption('a'))
{
dispatcher = new HandleDispatcher(context, line.getOptionValue('a'));
}
else if (line.hasOption('d'))
{
// run checker process for specified duration
try
{
dispatcher = new LimitedDurationDispatcher(
new SimpleDispatcher(context, processStart, true), new Date(
System.currentTimeMillis()
+ Utils.parseDuration(line
.getOptionValue('d'))));
}
catch (Exception e)
{
LOG.fatal("Couldn't parse " + line.getOptionValue('d')
+ " as a duration: ", e);
System.exit(0);
}
}
dispatcher = new ListDispatcher(idList);
}
else if (line.hasOption('c'))
{
int count = Integer.valueOf(line.getOptionValue('c'));
else if (line.hasOption('a'))
{
dispatcher = new HandleDispatcher(new BitstreamInfoDAO(), line.getOptionValue('a'));
}
else if (line.hasOption('d'))
{
// run checker process for specified duration
try
{
dispatcher = new LimitedDurationDispatcher(
new SimpleDispatcher(new BitstreamInfoDAO(), processStart, true), new Date(
System.currentTimeMillis()
+ Utils.parseDuration(line
.getOptionValue('d'))));
// run checker process for specified number of bitstreams
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), count);
}
catch (Exception e)
else
{
LOG.fatal("Couldn't parse " + line.getOptionValue('d')
+ " as a duration: ", e);
System.exit(0);
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), 1);
}
ResultsLogger logger = new ResultsLogger(processStart);
CheckerCommand checker = new CheckerCommand(context);
// verbose reporting
if (line.hasOption('v'))
{
checker.setReportVerbose(true);
}
checker.setProcessStartDate(processStart);
checker.setDispatcher(dispatcher);
checker.setCollector(logger);
checker.process();
context.complete();
context = null;
} finally {
if(context != null){
context.abort();
}
}
else if (line.hasOption('c'))
{
int count = Integer.valueOf(line.getOptionValue('c')).intValue();
// run checker process for specified number of bitstreams
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
new BitstreamInfoDAO(), processStart, false), count);
}
else
{
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
new BitstreamInfoDAO(), processStart, false), 1);
}
ResultsLogger logger = new ResultsLogger(processStart);
CheckerCommand checker = new CheckerCommand();
// verbose reporting
if (line.hasOption('v'))
{
checker.setReportVerbose(true);
}
checker.setProcessStartDate(processStart);
checker.setDispatcher(dispatcher);
checker.setCollector(logger);
checker.process();
System.exit(0);
}
/**

View File

@@ -9,7 +9,9 @@ package org.dspace.app.harvest;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -20,15 +22,21 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.IndexBrowse;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.OAIHarvester.HarvestingException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
/**
* Test class for harvested collections.
@@ -38,7 +46,11 @@ import org.dspace.handle.HandleManager;
public class Harvest
{
private static Context context;
private static final HarvestedCollectionService harvestedCollectionService = HarvestServiceFactory.getInstance().getHarvestedCollectionService();
private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
public static void main(String[] argv) throws Exception
{
// create an options object and populate it
@@ -195,11 +207,11 @@ public class Harvest
System.exit(1);
}
List<Integer> cids = HarvestedCollection.findAll(context);
System.out.println("Purging the following collections (deleting items and resetting harvest status): " + cids.toString());
for (Integer cid : cids)
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections)
{
harvester.purgeCollection(cid.toString(), eperson);
System.out.println("Purging the following collections (deleting items and resetting harvest status): " + harvestedCollection.getCollection().getID().toString());
harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson);
}
context.complete();
}
@@ -272,7 +284,7 @@ public class Harvest
if (collectionID.indexOf('/') != -1)
{
// string has a / so it must be a handle - try and resolve it
dso = HandleManager.resolveToObject(context, collectionID);
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, collectionID);
// resolved, now make sure it's a collection
if (dso == null || dso.getType() != Constants.COLLECTION)
@@ -289,7 +301,7 @@ public class Harvest
else
{
System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer.parseInt(collectionID) + "', " + "in context: " + context);
targetCollection = Collection.find(context, Integer.parseInt(collectionID));
targetCollection = collectionService.find(context, UUID.fromString(collectionID));
}
}
// was the collection valid?
@@ -314,15 +326,15 @@ public class Harvest
System.out.println(collection.getID());
try {
HarvestedCollection hc = HarvestedCollection.find(context, collection.getID());
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc == null) {
hc = HarvestedCollection.create(context, collection.getID());
hc = harvestedCollectionService.create(context, collection);
}
context.turnOffAuthorisationSystem();
hc.setHarvestParams(type, oaiSource, oaiSetId, mdConfigId);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.update();
harvestedCollectionService.update(context, hc);
context.restoreAuthSystemState();
context.complete();
}
@@ -352,11 +364,12 @@ public class Harvest
try
{
EPerson eperson = EPerson.findByEmail(context, email);
EPerson eperson = ePersonService.findByEmail(context, email);
context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem();
ItemIterator it = collection.getAllItems();
ItemService itemService = ContentServiceFactory.getInstance().getItemService();
Iterator<Item> it = itemService.findByCollection(context, collection);
IndexBrowse ib = new IndexBrowse(context);
int i=0;
while (it.hasNext()) {
@@ -364,24 +377,24 @@ public class Harvest
Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
ib.itemRemoved(item);
collection.removeItem(item);
// commit every 50 items
collectionService.removeItem(context, collection, item);
// Dispatch events every 50 items
if (i%50 == 0) {
context.commit();
context.dispatchEvents();
i=0;
}
}
HarvestedCollection hc = HarvestedCollection.find(context, collection.getID());
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc != null) {
hc.setHarvestResult(null,"");
hc.setLastHarvested(null);
hc.setHarvestMessage("");
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.setHarvestStartTime(null);
hc.update();
harvestedCollectionService.update(context, hc);
}
context.restoreAuthSystemState();
context.commit();
}
}
catch (Exception e) {
System.out.println("Changes could not be committed");
e.printStackTrace();
@@ -403,7 +416,7 @@ public class Harvest
OAIHarvester harvester = null;
try {
Collection collection = resolveCollection(collectionID);
HarvestedCollection hc = HarvestedCollection.find(context, collection.getID());
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
harvester = new OAIHarvester(context, collection, hc);
System.out.println("success. ");
}
@@ -419,7 +432,7 @@ public class Harvest
try {
// Harvest will not work for an anonymous user
EPerson eperson = EPerson.findByEmail(context, email);
EPerson eperson = ePersonService.findByEmail(context, email);
System.out.println("Harvest started... ");
context.setCurrentUser(eperson);
harvester.runHarvest();
@@ -446,16 +459,14 @@ public class Harvest
try
{
List<Integer> cids = HarvestedCollection.findAll(context);
for (Integer cid : cids)
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections)
{
HarvestedCollection hc = HarvestedCollection.find(context, cid);
//hc.setHarvestResult(null,"");
hc.setHarvestStartTime(null);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.update();
harvestedCollection.setHarvestStartTime(null);
harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY);
harvestedCollectionService.update(context, harvestedCollection);
}
context.commit();
System.out.println("success. ");
}
catch (Exception ex) {
@@ -467,12 +478,12 @@ public class Harvest
/**
* Starts up the harvest scheduler. Terminating this process will stop the scheduler.
*/
private static void startHarvester()
private static void startHarvester()
{
try
{
System.out.print("Starting harvest loop... ");
OAIHarvester.startNewScheduler();
HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler();
System.out.println("running. ");
}
catch (Exception ex) {

View File

@@ -0,0 +1,268 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport;
import org.apache.commons.cli.*;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import java.util.*;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
* printUsage() method.
* <P>
* ItemExport creates the simple AIP package that the importer also uses. It
* consists of:
* <P>
* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
* core in RDF schema / contents - text file, listing one file per line / file1
* - files contained in the item / file2 / ...
* <P>
* issues -doesn't handle special characters in metadata (needs to turn &'s into
* &amp;, etc.)
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace.
*
* @author David Little
* @author Jay Paz
*/
/**
 * Item exporter to create simple AIPs for DSpace content. Currently exports
 * individual items, or entire collections. For instructions on use, see
 * printUsage() method.
 * <P>
 * ItemExport creates the simple AIP package that the importer also uses. It
 * consists of:
 * <P>
 * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
 * core in RDF schema / contents - text file, listing one file per line / file1
 * - files contained in the item / file2 / ...
 * <P>
 * issues -doesn't handle special characters in metadata (needs to turn &'s into
 * &amp;, etc.)
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
 * of files (bitstreams) into DSpace.
 *
 * @author David Little
 * @author Jay Paz
 */
public class ItemExportCLITool {

    protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance().getItemExportService();
    protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
    protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();

    /**
     * Command-line entry point: parses the arguments, resolves the target
     * item or collection (by handle when the id contains '/', otherwise by
     * UUID) and delegates the actual export to {@link ItemExportService}.
     *
     * @param argv command line arguments (run with -h for usage)
     * @throws Exception on any unrecoverable export error
     */
    public static void main(String[] argv) throws Exception
    {
        // create an options object and populate it
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("t", "type", true, "type: COLLECTION or ITEM");
        options.addOption("i", "id", true, "ID or handle of thing to export");
        options.addOption("d", "dest", true,
                "destination where you want items to go");
        options.addOption("m", "migrate", false, "export for migration (remove handle and metadata that will be re-created in new system)");
        options.addOption("n", "number", true,
                "sequence number to begin exporting items with");
        options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)");
        options.addOption("h", "help", false, "help");

        // as pointed out by Peter Dietz this provides similar functionality to export metadata
        // but it is needed since it directly exports to Simple Archive Format (SAF)
        options.addOption("x", "exclude-bitstreams", false, "do not export bitstreams");

        CommandLine line = parser.parse(options, argv);

        String typeString = null;
        String destDirName = null;
        String myIDString = null;
        int seqStart = -1;
        int myType = -1;

        Item myItem = null;
        Collection mycollection = null;

        if (line.hasOption('h'))
        {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("ItemExport\n", options);
            System.out
                    .println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number");
            System.out
                    .println("singleitem: ItemExport -t ITEM -i ID -d dest -n number");

            System.exit(0);
        }

        if (line.hasOption('t')) // type
        {
            typeString = line.getOptionValue('t');

            if ("ITEM".equals(typeString))
            {
                myType = Constants.ITEM;
            }
            else if ("COLLECTION".equals(typeString))
            {
                myType = Constants.COLLECTION;
            }
        }

        if (line.hasOption('i')) // id
        {
            myIDString = line.getOptionValue('i');
        }

        if (line.hasOption('d')) // dest
        {
            destDirName = line.getOptionValue('d');
        }

        if (line.hasOption('n')) // number
        {
            seqStart = Integer.parseInt(line.getOptionValue('n'));
        }

        boolean migrate = false;
        if (line.hasOption('m')) // migrate
        {
            migrate = true;
        }

        boolean zip = false;
        String zipFileName = "";
        if (line.hasOption('z'))
        {
            zip = true;
            zipFileName = line.getOptionValue('z');
        }

        boolean excludeBitstreams = false;
        if (line.hasOption('x'))
        {
            excludeBitstreams = true;
        }

        // now validate the args
        if (myType == -1)
        {
            System.out
                    .println("type must be either COLLECTION or ITEM (-h for help)");
            System.exit(1);
        }

        if (destDirName == null)
        {
            System.out
                    .println("destination directory must be set (-h for help)");
            System.exit(1);
        }

        if (seqStart == -1)
        {
            System.out
                    .println("sequence start number must be set (-h for help)");
            System.exit(1);
        }

        if (myIDString == null)
        {
            System.out
                    .println("ID must be set to either a database ID or a handle (-h for help)");
            System.exit(1);
        }

        Context c = new Context();
        c.setIgnoreAuthorization(true);

        if (myType == Constants.ITEM)
        {
            // first, is myIDString a handle?
            if (myIDString.indexOf('/') != -1)
            {
                myItem = (Item) handleService.resolveToObject(c, myIDString);

                if ((myItem == null) || (myItem.getType() != Constants.ITEM))
                {
                    myItem = null;
                }
            }
            else
            {
                myItem = itemService.find(c, UUID.fromString(myIDString));
            }

            if (myItem == null)
            {
                System.out
                        .println("Error, item cannot be found: " + myIDString);
                // Bail out here: previously execution fell through with both
                // myItem and mycollection null and hit a NullPointerException
                // in the export section below (the collection branch already
                // exits in the equivalent situation).
                System.exit(1);
            }
        }
        else
        {
            if (myIDString.indexOf('/') != -1)
            {
                // has a / must be a handle
                mycollection = (Collection) handleService.resolveToObject(c,
                        myIDString);

                // ensure it's a collection
                if ((mycollection == null)
                        || (mycollection.getType() != Constants.COLLECTION))
                {
                    mycollection = null;
                }
            }
            else
            {
                // myIDString was already validated as non-null above, so it
                // must be a UUID at this point
                mycollection = collectionService.find(c, UUID.fromString(myIDString));
            }

            if (mycollection == null)
            {
                System.out.println("Error, collection cannot be found: "
                        + myIDString);
                System.exit(1);
            }
        }

        if (zip)
        {
            Iterator<Item> items;
            if (myItem != null)
            {
                List<Item> myItems = new ArrayList<>();
                myItems.add(myItem);
                items = myItems.iterator();
            }
            else
            {
                System.out.println("Exporting from collection: " + myIDString);
                items = itemService.findByCollection(c, mycollection);
            }
            itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams);
        }
        else
        {
            if (myItem != null)
            {
                // it's only a single item
                itemExportService.exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate, excludeBitstreams);
            }
            else
            {
                System.out.println("Exporting from collection: " + myIDString);

                // it's a collection, so do a bunch of items
                Iterator<Item> i = itemService.findByCollection(c, mycollection);
                itemExportService.exportItem(c, i, destDirName, seqStart, migrate, excludeBitstreams);
            }
        }

        c.complete();
    }
}

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport.factory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.utils.DSpace;
/**
* Abstract factory to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
/**
 * Abstract factory supplying the services of the itemexport package.
 * Callers obtain the configured concrete factory through
 * {@link #getInstance()} rather than instantiating one directly.
 *
 * @author kevinvandevelde at atmire.com
 */
public abstract class ItemExportServiceFactory {

    /**
     * @return the service responsible for exporting items as simple
     *         archive packages
     */
    public abstract ItemExportService getItemExportService();

    /**
     * Looks up the factory implementation registered with the DSpace
     * service manager under the bean name "itemExportServiceFactory".
     *
     * @return the configured {@code ItemExportServiceFactory} bean
     */
    public static ItemExportServiceFactory getInstance() {
        return new DSpace()
                .getServiceManager()
                .getServiceByName("itemExportServiceFactory",
                        ItemExportServiceFactory.class);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport.factory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Factory implementation to get services for the itemexport package; use
 * {@link ItemExportServiceFactory#getInstance()} to retrieve an implementation.
 *
 * @author kevinvandevelde at atmire.com
 */
public class ItemExportServiceFactoryImpl extends ItemExportServiceFactory {

    /** The item export service wired in by Spring. */
    @Autowired(required = true)
    private ItemExportService itemExportService;

    /** {@inheritDoc} */
    @Override
    public ItemExportService getItemExportService() {
        return this.itemExportService;
    }
}

View File

@@ -0,0 +1,270 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport.service;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import javax.mail.MessagingException;
import java.io.InputStream;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/**
 * Item exporter to create simple AIPs for DSpace content. Currently exports
 * individual items, or entire collections. For instructions on use, see
 * printUsage() method.
 * <P>
 * ItemExport creates the simple AIP package that the importer also uses. It
 * consists of:
 * <P>
 * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
 * core in RDF schema / contents - text file, listing one file per line / file1
 * - files contained in the item / file2 / ...
 * <P>
 * issues -doesn't handle special characters in metadata (needs to turn &'s into
 * &amp;, etc.)
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
 * of files (bitstreams) into DSpace.
 *
 * @author David Little
 * @author Jay Paz
 */
public interface ItemExportService {
/**
 * MIME type ("application/zip") of generated export archives, used when
 * serving an export for download
 */
public static final String COMPRESSED_EXPORT_MIME_TYPE = "application/zip";
/**
 * Export the given items to the destination directory.
 *
 * @param c The DSpace context
 * @param i Iterator over the items to export
 * @param destDirName The directory to write the export into
 * @param seqStart The first number in the sequence of exported item directories
 * @param migrate Whether to use the migrate option or not
 * @param excludeBitstreams Whether to exclude the items' bitstreams from the export
 * @throws Exception
 */
public void exportItem(Context c, Iterator<Item> i,
String destDirName, int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception;
/**
 * Method to perform an export and save it as a zip file.
 *
 * @param context The DSpace Context
 * @param items The items to export
 * @param destDirName The directory to save the export in
 * @param zipFileName The name to save the zip file as
 * @param seqStart The first number in the sequence
 * @param migrate Whether to use the migrate option or not
 * @param excludeBitstreams Whether to exclude the items' bitstreams from the export
 * @throws Exception
 */
public void exportAsZip(Context context, Iterator<Item> items,
String destDirName, String zipFileName,
int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception;
/**
 * Convenience method to export a single Community, Collection, or
 * Item
 *
 * @param dso
 * - the dspace object to export
 * @param context
 * - the dspace context
 * @param migrate
 * - whether to use the migrate option or not
 * @throws Exception
 */
public void createDownloadableExport(DSpaceObject dso,
Context context, boolean migrate) throws Exception;
/**
 * Convenience method to export a List of dspace objects (Community,
 * Collection or Item)
 *
 * @param dsObjects
 * - List containing dspace objects
 * @param context
 * - the dspace context
 * @param migrate
 * - whether to use the migrate option or not
 * @throws Exception
 */
public void createDownloadableExport(List<DSpaceObject> dsObjects,
Context context, boolean migrate) throws Exception;
/**
 * Convenience method to export a single Community, Collection, or
 * Item
 *
 * @param dso
 * - the dspace object to export
 * @param context
 * - the dspace context
 * @param additionalEmail
 * - cc email to use
 * @param migrate
 * - whether to use the migrate option or not
 * @throws Exception
 */
public void createDownloadableExport(DSpaceObject dso,
Context context, String additionalEmail, boolean migrate) throws Exception;
/**
 * Convenience method to export a List of dspace objects (Community,
 * Collection or Item)
 *
 * @param dsObjects
 * - List containing dspace objects
 * @param context
 * - the dspace context
 * @param additionalEmail
 * - cc email to use
 * @param migrate
 * - whether to use the migrate option or not
 * @throws Exception
 */
public void createDownloadableExport(List<DSpaceObject> dsObjects,
Context context, String additionalEmail, boolean migrate) throws Exception;
/**
 * Create a file name based on the date and eperson
 *
 * @param type
 * - type of export (prefixed to the generated file name)
 * @param eperson
 * - eperson who requested export and will be able to download it
 * @param date
 * - the date the export process was created
 * @return String representing the file name in the form of
 * 'export_yyy_MMM_dd_count_epersonID'
 * @throws Exception
 */
public String assembleFileName(String type, EPerson eperson,
Date date) throws Exception;
/**
 * Use config file entry for org.dspace.app.itemexport.download.dir and id
 * of the eperson to create a download directory name
 *
 * @param ePerson
 * - the eperson who requested export archive
 * @return String representing a directory in the form of
 * org.dspace.app.itemexport.download.dir/epersonID
 * @throws Exception
 */
public String getExportDownloadDirectory(EPerson ePerson)
throws Exception;
/**
 * Returns config file entry for org.dspace.app.itemexport.work.dir
 *
 * @return String representing config file entry for
 * org.dspace.app.itemexport.work.dir
 * @throws Exception
 */
public String getExportWorkDirectory() throws Exception;
/**
 * Used to read the export archive. Intended for download.
 *
 * @param fileName
 * the name of the file to download
 * @param eperson
 * the eperson requesting the download
 * @return an input stream of the file to be downloaded
 * @throws Exception
 */
public InputStream getExportDownloadInputStream(String fileName,
EPerson eperson) throws Exception;
/**
 * Get the file size of the export archive represented by the file name.
 *
 * @param context
 * the dspace context
 * @param fileName
 * name of the file to get the size.
 * @return the size of the export archive file
 * @throws Exception
 */
public long getExportFileSize(Context context, String fileName) throws Exception;
/**
 * Get the last-modified time of the export archive represented by the
 * file name.
 *
 * @param context
 * the dspace context
 * @param fileName
 * name of the file to check
 * @return the last-modified time of the archive file
 * @throws Exception
 */
public long getExportFileLastModified(Context context, String fileName)
throws Exception;
/**
 * The file name of the export archive contains the eperson id of the person
 * who created it When requested for download this method can check if the
 * person requesting it is the same one that created it
 *
 * @param context
 * dspace context
 * @param fileName
 * the file name to check auths for
 * @return true if it is the same person false otherwise
 */
public boolean canDownload(Context context, String fileName);
/**
 * Reads the download directory for the eperson to see if any export
 * archives are available
 *
 * @param eperson
 * the eperson whose download directory is read
 * @return a list of file names representing export archives that have been
 * processed
 * @throws Exception
 */
public List<String> getExportsAvailable(EPerson eperson)
throws Exception;
/**
 * A clean up method that is run before a new export archive is created. It
 * uses the config file entry 'org.dspace.app.itemexport.life.span.hours' to
 * determine if the current exports are too old and need purging
 *
 * @param eperson
 * - the eperson to clean up
 * @throws Exception
 */
public void deleteOldExportArchives(EPerson eperson) throws Exception;
/**
 * A clean up method that is run before a new export archive is created. It
 * uses the config file entry 'org.dspace.app.itemexport.life.span.hours' to
 * determine if the current exports are too old and need purging.
 * Removes all old exports, not just those for the person doing the export.
 *
 * @throws Exception
 */
public void deleteOldExportArchives() throws Exception;
/**
 * Since the archive is created in a new thread we are unable to communicate
 * with calling method about success or failure. We accomplish this
 * communication with email instead. Send a success email once the export
 * archive is complete and ready for download
 *
 * @param context
 * - the current Context
 * @param eperson
 * - eperson to send the email to
 * @param fileName
 * - the file name to be downloaded. It is added to the url in
 * the email
 * @throws MessagingException
 */
public void emailSuccessMessage(Context context, EPerson eperson,
String fileName) throws MessagingException;
/**
 * Since the archive is created in a new thread we are unable to communicate
 * with calling method about success or failure. We accomplish this
 * communication with email instead. Send an error email if the export
 * archive fails
 *
 * @param eperson
 * - EPerson to send the error message to
 * @param error
 * - the error message
 * @throws MessagingException
 */
public void emailErrorMessage(EPerson eperson, String error)
throws MessagingException;
/**
 * Compress the given source path into the target zip file.
 *
 * @param strSource
 * - the source path to compress (NOTE(review): presumably a
 * directory - confirm against the implementation)
 * @param target
 * - path of the zip file to create
 * @throws Exception
 */
public void zip(String strSource, String target) throws Exception;
}

View File

@@ -0,0 +1,420 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport;
import org.apache.commons.cli.*;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.search.DSIndexer;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
 * Import items into DSpace. The conventional use is upload files by copying
 * them. DSpace writes the item's bitstreams into its assetstore. Metadata is
 * also loaded to the DSpace database.
 * <P>
 * A second use assumes the bitstream files already exist in a storage
 * resource accessible to DSpace. In this case the bitstreams are 'registered'.
 * That is, the metadata is loaded to the DSpace database and DSpace is given
 * the location of the file which is subsumed into DSpace.
 * <P>
 * The distinction is controlled by the format of lines in the 'contents' file.
 * See comments in processContentsFile() below.
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to
 * allow the registration of files (bitstreams) into DSpace.
 */
public class ItemImportCLITool {

    // Whether to apply the collection's item template to imported items (-p flag)
    private static boolean template = false;

    private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

    /**
     * Command-line entry point. Parses and validates options, resolves the
     * importing eperson and destination collections, then delegates the
     * add / replace / delete work to the configured {@link ItemImportService}.
     * Exits the JVM with status 0 on success, 1 on failure.
     *
     * @param argv command line arguments; run with -h for usage
     * @throws Exception if setup fails before the import begins
     */
    public static void main(String[] argv) throws Exception
    {
        DSIndexer.setBatchProcessingMode(true);
        Date startTime = new Date();
        int status = 0;

        try {
            // create an options object and populate it
            CommandLineParser parser = new PosixParser();

            Options options = new Options();

            options.addOption("a", "add", false, "add items to DSpace");
            options.addOption("b", "add-bte", false, "add items to DSpace via Biblio-Transformation-Engine (BTE)");
            options.addOption("r", "replace", false, "replace items in mapfile");
            options.addOption("d", "delete", false,
                    "delete items listed in mapfile");
            options.addOption("i", "inputtype", true, "input type in case of BTE import");
            options.addOption("s", "source", true, "source of items (directory)");
            options.addOption("z", "zip", true, "name of zip file");
            options.addOption("c", "collection", true,
                    "destination collection(s) Handle or database ID");
            options.addOption("m", "mapfile", true, "mapfile items in mapfile");
            options.addOption("e", "eperson", true,
                    "email of eperson doing importing");
            options.addOption("w", "workflow", false,
                    "send submission through collection's workflow");
            options.addOption("n", "notify", false,
                    "if sending submissions through the workflow, send notification emails");
            options.addOption("t", "test", false,
                    "test run - do not actually import items");
            options.addOption("p", "template", false, "apply template");
            options.addOption("R", "resume", false,
                    "resume a failed import (add only)");
            options.addOption("q", "quiet", false, "don't display metadata");

            options.addOption("h", "help", false, "help");

            CommandLine line = parser.parse(options, argv);

            String command = null; // add replace remove, etc
            String bteInputType = null; //ris, endnote, tsv, csv, bibtex
            String sourcedir = null;
            String mapfile = null;
            String eperson = null; // db ID or email
            String[] collections = null; // db ID or handles
            boolean isTest = false;
            boolean isResume = false;
            boolean useWorkflow = false;
            boolean useWorkflowSendEmail = false;
            boolean isQuiet = false;

            if (line.hasOption('h')) {
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("ItemImport\n", options);
                System.out
                        .println("\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                        .println("\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z filename.zip -m mapfile");
                System.out
                        .println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                        .println("deleting items: ItemImport -d -e eperson -m mapfile");
                System.out
                        .println("If multiple collections are specified, the first collection will be the one that owns the item.");

                System.exit(0);
            }

            if (line.hasOption('a')) {
                command = "add";
            }

            if (line.hasOption('r')) {
                command = "replace";
            }

            if (line.hasOption('d')) {
                command = "delete";
            }

            if (line.hasOption('b')) {
                command = "add-bte";
            }

            if (line.hasOption('i')) {
                bteInputType = line.getOptionValue('i');
            }

            if (line.hasOption('w')) {
                useWorkflow = true;
                if (line.hasOption('n')) {
                    useWorkflowSendEmail = true;
                }
            }

            if (line.hasOption('t')) {
                isTest = true;
                System.out.println("**Test Run** - not actually importing items.");
            }

            if (line.hasOption('p')) {
                template = true;
            }

            if (line.hasOption('s')) // source
            {
                sourcedir = line.getOptionValue('s');
            }

            if (line.hasOption('m')) // mapfile
            {
                mapfile = line.getOptionValue('m');
            }

            if (line.hasOption('e')) // eperson
            {
                eperson = line.getOptionValue('e');
            }

            if (line.hasOption('c')) // collections
            {
                collections = line.getOptionValues('c');
            }

            if (line.hasOption('R')) {
                isResume = true;
                System.out
                        .println("**Resume import** - attempting to import items not already imported");
            }

            if (line.hasOption('q')) {
                isQuiet = true;
            }

            boolean zip = false;
            String zipfilename = "";
            if (line.hasOption('z')) {
                zip = true;
                zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
            }

            //By default assume collections will be given on the command line
            boolean commandLineCollections = true;

            // now validate
            // must have a command set
            if (command == null) {
                System.out
                        .println("Error - must run with either add, replace, or remove (run with -h flag for details)");
                System.exit(1);
            } else if ("add".equals(command) || "replace".equals(command)) {
                if (sourcedir == null) {
                    System.out
                            .println("Error - a source directory containing items must be set");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out
                            .println("Error - a map file to hold importing results must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (eperson == null) {
                    System.out
                            .println("Error - an eperson to do the importing must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (collections == null) {
                    System.out.println("No collections given. Assuming 'collections' file inside item directory");
                    commandLineCollections = false;
                }
            } else if ("add-bte".equals(command)) {
                //Source dir can be null, the user can specify the parameters for his loader in the Spring XML configuration file
                if (mapfile == null) {
                    System.out
                            .println("Error - a map file to hold importing results must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (eperson == null) {
                    System.out
                            .println("Error - an eperson to do the importing must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (collections == null) {
                    System.out.println("No collections given. Assuming 'collections' file inside item directory");
                    commandLineCollections = false;
                }

                if (bteInputType == null) {
                    System.out
                            .println("Error - an input type (tsv, csv, ris, endnote, bibtex or any other type you have specified in BTE Spring XML configuration file) must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }
            } else if ("delete".equals(command)) {
                if (eperson == null) {
                    System.out
                            .println("Error - an eperson to do the importing must be specified");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out.println("Error - a map file must be specified");
                    System.exit(1);
                }
            }

            // can only resume for adds
            if (isResume && !"add".equals(command) && !"add-bte".equals(command)) {
                System.out
                        .println("Error - resume option only works with the --add or the --add-bte commands");
                System.exit(1);
            }

            // do checks around mapfile - if mapfile exists and 'add' is selected,
            // resume must be chosen
            File myFile = new File(mapfile);

            if (!isResume && "add".equals(command) && myFile.exists()) {
                System.out.println("Error - the mapfile " + mapfile
                        + " already exists.");
                System.out
                        .println("Either delete it or use --resume if attempting to resume an aborted import.");
                System.exit(1);
            }

            ItemImportService myloader = ItemImportServiceFactory.getInstance().getItemImportService();
            myloader.setTest(isTest);
            myloader.setResume(isResume);
            myloader.setUseWorkflow(useWorkflow);
            myloader.setUseWorkflowSendEmail(useWorkflowSendEmail);
            myloader.setQuiet(isQuiet);

            // create a context
            Context c = new Context();

            // find the EPerson, assign to context
            EPerson myEPerson = null;

            if (eperson.indexOf('@') != -1) {
                // @ sign, must be an email
                myEPerson = epersonService.findByEmail(c, eperson);
            } else {
                myEPerson = epersonService.find(c, UUID.fromString(eperson));
            }

            if (myEPerson == null) {
                System.out.println("Error, eperson cannot be found: " + eperson);
                System.exit(1);
            }

            c.setCurrentUser(myEPerson);

            // find collections
            List<Collection> mycollections = null;

            // don't need to validate collections set if command is "delete"
            // also if no collections are given in the command line
            if (!"delete".equals(command) && commandLineCollections) {
                System.out.println("Destination collections:");

                mycollections = new ArrayList<>();

                // validate each collection arg to see if it's a real collection
                for (int i = 0; i < collections.length; i++) {
                    // is the ID a handle?
                    if (collections[i].indexOf('/') != -1) {
                        // string has a / so it must be a handle - try and resolve
                        // it
                        mycollections.add((Collection) handleService
                                .resolveToObject(c, collections[i]));

                        // resolved, now make sure it's a collection
                        if ((mycollections.get(i) == null)
                                || (mycollections.get(i).getType() != Constants.COLLECTION)) {
                            mycollections.set(i, null);
                        }
                    }
                    // not a handle, try and treat it as a collection database ID
                    else if (collections[i] != null) {
                        // FIX: this previously called mycollections.set(i, ...),
                        // which always throws IndexOutOfBoundsException because no
                        // element exists at index i yet; add() keeps the list
                        // aligned with the loop index.
                        mycollections.add(collectionService.find(c, UUID.fromString(collections[i])));
                    }
                    else {
                        // keep list indices aligned with the collections array so
                        // the validity check below can safely call get(i)
                        mycollections.add(null);
                    }

                    // was the collection valid?
                    if (mycollections.get(i) == null) {
                        throw new IllegalArgumentException("Cannot resolve "
                                + collections[i] + " to collection");
                    }

                    // print progress info
                    String owningPrefix = "";

                    if (i == 0) {
                        owningPrefix = "Owning ";
                    }

                    System.out.println(owningPrefix + " Collection: "
                            + mycollections.get(i).getName());
                }
            } // end of validating collections

            try {
                // If this is a zip archive, unzip it first
                if (zip) {
                    sourcedir = myloader.unzip(sourcedir, zipfilename);
                }

                c.turnOffAuthorisationSystem();

                if ("add".equals(command)) {
                    myloader.addItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("replace".equals(command)) {
                    myloader.replaceItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("delete".equals(command)) {
                    myloader.deleteItems(c, mapfile);
                } else if ("add-bte".equals(command)) {
                    myloader.addBTEItems(c, mycollections, sourcedir, mapfile, template, bteInputType, null);
                }

                // complete all transactions
                c.complete();
            } catch (Exception e) {
                c.abort();
                e.printStackTrace();
                System.out.println(e);
                status = 1;
            }

            // Delete the unzipped file
            try {
                if (zip) {
                    // System.gc() kept from the original code: it historically
                    // helped release zip file handles before deletion on some
                    // platforms (NOTE(review): presumably Windows - confirm)
                    System.gc();
                    System.out.println("Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
                    myloader.cleanupZipTemp();
                }
            } catch (Exception ex) {
                System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile().getAbsolutePath());
            }

            if (isTest) {
                System.out.println("***End of Test Run***");
            }
        } finally {
            DSIndexer.setBatchProcessingMode(false);
            Date endTime = new Date();
            System.out.println("Started: " + startTime.getTime());
            System.out.println("Ended: " + endTime.getTime());
            System.out.println("Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime.getTime() - startTime.getTime()) + " msecs)");
        }

        System.exit(status);
    }
}

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport.factory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.utils.DSpace;
/**
 * Abstract factory to get services for the itemimport package; call
 * {@link ItemImportServiceFactory#getInstance()} to retrieve the configured
 * implementation.
 *
 * @author kevinvandevelde at atmire.com
 */
public abstract class ItemImportServiceFactory {

    /**
     * @return the service used to import items into DSpace
     */
    public abstract ItemImportService getItemImportService();

    /**
     * Look up the factory implementation registered with the DSpace service
     * manager under the name "itemImportServiceFactory".
     *
     * @return the configured {@link ItemImportServiceFactory} instance
     */
    public static ItemImportServiceFactory getInstance() {
        DSpace dspace = new DSpace();
        return dspace.getServiceManager().getServiceByName(
                "itemImportServiceFactory", ItemImportServiceFactory.class);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport.factory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Factory implementation to get services for the itemimport package; use
 * {@link ItemImportServiceFactory#getInstance()} to retrieve an implementation.
 *
 * @author kevinvandevelde at atmire.com
 */
public class ItemImportServiceFactoryImpl extends ItemImportServiceFactory {

    /** The item import service wired in by Spring. */
    @Autowired(required = true)
    private ItemImportService itemImportService;

    /** {@inheritDoc} */
    @Override
    public ItemImportService getItemImportService() {
        return this.itemImportService;
    }
}

View File

@@ -0,0 +1,124 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport.service;
import org.dspace.app.itemimport.BatchUpload;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import javax.mail.MessagingException;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
 * Import items into DSpace. The conventional use is upload files by copying
 * them. DSpace writes the item's bitstreams into its assetstore. Metadata is
 * also loaded to the DSpace database.
 * <P>
 * A second use assumes the bitstream files already exist in a storage
 * resource accessible to DSpace. In this case the bitstreams are 'registered'.
 * That is, the metadata is loaded to the DSpace database and DSpace is given
 * the location of the file which is subsumed into DSpace.
 * <P>
 * The distinction is controlled by the format of lines in the 'contents' file.
 * See comments in processContentsFile() below.
 * <P>
 * Modified by David Little, UCSD Libraries 12/21/04 to
 * allow the registration of files (bitstreams) into DSpace.
 */
public interface ItemImportService {
/**
 * Add items from the source directory to the given collections.
 * NOTE(review): presumably the atomic variant of addItems that rolls the
 * whole batch back if any item fails - confirm against the implementation.
 *
 * @param c
 * - the dspace context
 * @param mycollections
 * - the collections the items will be added to
 * @param sourceDir
 * - directory containing the items to import
 * @param mapFile
 * - path of the map file recording import results
 * @param template
 * - whether to apply the collection template
 * @throws Exception
 */
public void addItemsAtomic(Context c, List<Collection> mycollections, String sourceDir, String mapFile, boolean template) throws Exception;
/**
 * Add items from the source directory to the given collections.
 *
 * @param c
 * - the dspace context
 * @param mycollections
 * - the collections the items will be added to
 * @param sourceDir
 * - directory containing the items to import
 * @param mapFile
 * - path of the map file recording import results
 * @param template
 * - whether to apply the collection template
 * @throws Exception
 */
public void addItems(Context c, List<Collection> mycollections,
String sourceDir, String mapFile, boolean template) throws Exception;
/**
 * Unzip the given zip file.
 *
 * @param zipfile the zip archive to extract
 * @return the path of the directory containing the extracted content
 * @throws IOException
 */
public String unzip(File zipfile) throws IOException;
/**
 * Unzip the given zip file into the given destination directory.
 *
 * @param zipfile the zip archive to extract
 * @param destDir the directory to extract into
 * @return the path of the directory containing the extracted content
 * @throws IOException
 */
public String unzip(File zipfile, String destDir) throws IOException;
/**
 * Unzip the named zip file located under the source directory.
 *
 * @param sourcedir the directory containing the zip file
 * @param zipfilename the path of the zip file
 * @return the path of the directory containing the extracted content,
 * suitable for use as a new import source directory
 * @throws IOException
 */
public String unzip(String sourcedir, String zipfilename) throws IOException;
/**
 *
 * Given a public URL to a zip file that has the Simple Archive Format, this method imports the contents to DSpace
 * @param url The public URL of the zip file
 * @param owningCollection The owning collection the items will belong to
 * @param collections The collections the created items will be inserted to, apart from the owning one
 * @param resumeDir In case of a resume request, the directory that contains the old mapfile and data
 * @param inputType The input type of the data (Simple Archive Format or one of the BTE input types)
 * @param context The context
 * @param template Whether to apply the collection template
 * @throws Exception
 */
public void processUIImport(String url, Collection owningCollection, String[] collections, String resumeDir, String inputType, Context context, boolean template) throws Exception;
/**
 * Since the BTE batch import is done in a new thread we are unable to communicate
 * with calling method about success or failure. We accomplish this
 * communication with email instead. Send a success email once the batch
 * import is complete
 *
 * @param context
 * - the current Context
 * @param eperson
 * - eperson to send the email to
 * @param fileName
 * - the filepath to the mapfile created by the batch import
 * @throws javax.mail.MessagingException
 */
public void emailSuccessMessage(Context context, EPerson eperson,
String fileName) throws MessagingException;
/**
 * Since the BTE batch import is done in a new thread we are unable to communicate
 * with calling method about success or failure. We accomplish this
 * communication with email instead. Send an error email if the batch
 * import fails
 *
 * @param eperson
 * - EPerson to send the error message to
 * @param error
 * - the error message
 * @throws MessagingException
 */
public void emailErrorMessage(EPerson eperson, String error)
throws MessagingException;
/**
 * List the batch uploads available for the given eperson.
 *
 * @param eperson the eperson whose uploads are listed
 * @return the batch uploads available to this eperson
 * @throws Exception
 */
public List<BatchUpload> getImportsAvailable(EPerson eperson)
throws Exception;
/**
 * Get the directory the given eperson may upload import batches to.
 *
 * @param ePerson the eperson doing the upload
 * @return the path of the eperson's upload directory
 * @throws Exception
 */
public String getImportUploadableDirectory(EPerson ePerson)
throws Exception;
/**
 * Delete the batch upload identified by the given upload id.
 *
 * @param c the dspace context
 * @param uploadId the id of the batch upload to delete
 * @throws Exception
 */
public void deleteBatchUpload(Context c, String uploadId) throws Exception;
/**
 * Replace the items listed in the map file with the content of the source
 * directory.
 *
 * @param c the dspace context
 * @param mycollections the collections the items belong to
 * @param sourcedir directory containing the replacement items
 * @param mapfile path of the map file listing the items to replace
 * @param template whether to apply the collection template
 * @throws Exception
 */
public void replaceItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile, boolean template) throws Exception;
/**
 * Delete the items listed in the map file.
 *
 * @param c the dspace context
 * @param mapfile path of the map file listing the items to delete
 * @throws Exception
 */
public void deleteItems(Context c, String mapfile) throws Exception;
/**
 * Add items via the Biblio-Transformation-Engine (BTE).
 *
 * @param c the dspace context
 * @param mycollections the collections the items will be added to
 * @param sourcedir source of the items (may be null if configured in the BTE Spring XML)
 * @param mapfile path of the map file recording import results
 * @param template whether to apply the collection template
 * @param bteInputType the BTE input type (tsv, csv, ris, endnote, bibtex, ...)
 * @param workingDir the working directory for the BTE import
 * @throws Exception
 */
public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile, boolean template, String bteInputType, String workingDir) throws Exception;
/**
 * @return the path of the temporary work directory used during imports
 */
public String getTempWorkDir();
/**
 * @return the temporary work directory used during imports, as a File
 */
public File getTempWorkDirFile();
/**
 * Remove the temporary files created while unzipping an import archive.
 */
public void cleanupZipTemp();
/**
 * @param isTest whether to run in test mode (no actual import)
 */
public void setTest(boolean isTest);
/**
 * @param isResume whether to resume a previously aborted import
 */
public void setResume(boolean isResume);
/**
 * @param useWorkflow whether submissions go through the collection's workflow
 */
public void setUseWorkflow(boolean useWorkflow);
/**
 * @param useWorkflow whether workflow notification emails are sent
 */
public void setUseWorkflowSendEmail(boolean useWorkflow);
/**
 * @param isQuiet whether to suppress metadata output
 */
public void setQuiet(boolean isQuiet);
}

View File

@@ -9,13 +9,16 @@ package org.dspace.app.itemmarking;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.app.util.Util;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This is an item marking Strategy class that tries to mark an item availability
@@ -28,6 +31,9 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
private String availableImageName;
private String nonAvailableImageName;
@Autowired(required = true)
protected ItemService itemService;
public ItemMarkingAvailabilityBitstreamStrategy() {
@@ -37,27 +43,27 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
Bundle[] bundles = item.getBundles("ORIGINAL");
if (bundles.length == 0){
List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
if (bundles.size() == 0){
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
return markInfo;
}
else {
Bundle originalBundle = bundles[0];
if (originalBundle.getBitstreams().length == 0){
Bundle originalBundle = bundles.iterator().next();
if (originalBundle.getBitstreams().size() == 0){
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
return markInfo;
}
else {
Bitstream bitstream = originalBundle.getBitstreams()[0];
ItemMarkingInfo signInfo = new ItemMarkingInfo();
signInfo.setImageName(availableImageName);
signInfo.setTooltip(bitstream.getName());
Bitstream bitstream = originalBundle.getBitstreams().get(0);
ItemMarkingInfo signInfo = new ItemMarkingInfo();
signInfo.setImageName(availableImageName);
signInfo.setTooltip(bitstream.getName());
@@ -88,4 +94,4 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
public void setNonAvailableImageName(String nonAvailableImageName) {
this.nonAvailableImageName = nonAvailableImageName;
}
}
}

View File

@@ -9,11 +9,14 @@ package org.dspace.app.itemmarking;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dspace.content.Item;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This is an item marking Strategy class that tries to mark an item
@@ -25,6 +28,9 @@ import org.dspace.core.Context;
*/
public class ItemMarkingMetadataStrategy implements ItemMarkingExtractor {
@Autowired(required = true)
protected ItemService itemService;
private String metadataField;
Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();
@@ -37,11 +43,11 @@ public class ItemMarkingMetadataStrategy implements ItemMarkingExtractor {
if (metadataField != null && mapping!=null)
{
Metadatum[] vals = item.getMetadataByMetadataString(metadataField);
if (vals.length > 0)
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, metadataField);
if (vals.size() > 0)
{
for (Metadatum value : vals){
String type = value.value;
for (MetadataValue value : vals){
String type = value.getValue();
if (mapping.containsKey(type)){
return mapping.get(type);
}

View File

@@ -21,7 +21,7 @@ import java.util.Map;
*/
public class ActionManager implements Iterable<UpdateAction> {
private Map<Class<? extends UpdateAction>, UpdateAction> registry
protected Map<Class<? extends UpdateAction>, UpdateAction> registry
= new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();
public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
@@ -53,24 +53,28 @@ public class ActionManager implements Iterable<UpdateAction> {
*
* @return iterator for UpdateActions
*/
public Iterator<UpdateAction> iterator()
@Override
public Iterator<UpdateAction> iterator()
{
return new Iterator<UpdateAction>()
{
private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();
public boolean hasNext()
@Override
public boolean hasNext()
{
return itr.hasNext();
}
public UpdateAction next()
@Override
public UpdateAction next()
{
return registry.get(itr.next());
}
//not supported
public void remove()
@Override
public void remove()
{
throw new UnsupportedOperationException();
}

View File

@@ -17,17 +17,16 @@ import java.util.ArrayList;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.InstallItem;
import org.dspace.content.Item;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.InstallItemService;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
/**
* Action to add bitstreams listed in item contents file to the item in DSpace
@@ -36,6 +35,11 @@ import org.dspace.eperson.Group;
*/
public class AddBitstreamsAction extends UpdateBitstreamsAction {
protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
protected BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService();
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
public AddBitstreamsAction()
{
//empty
@@ -54,7 +58,8 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws IllegalArgumentException,
ParseException, IOException, AuthorizeException, SQLException
{
@@ -107,12 +112,12 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
String append = ". Added " + Integer.toString(bitstream_bundles_updated)
+ " bitstream(s) on " + DCDate.getCurrent() + " : "
+ InstallItem.getBitstreamProvenanceMessage(item);
MetadataUtilities.appendMetadata(item, dtom, false, append);
+ installItemService.getBitstreamProvenanceMessage(context, item);
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
private String addBitstream(Context context, ItemArchive itarch, Item item, File dir,
protected String addBitstream(Context context, ItemArchive itarch, Item item, File dir,
ContentsEntry ce, boolean suppressUndo, boolean isTest)
throws IOException, IllegalArgumentException, SQLException, AuthorizeException, ParseException
{
@@ -141,23 +146,23 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
if (!isTest)
{
// find the bundle
Bundle[] bundles = item.getBundles(newBundleName);
List<Bundle> bundles = itemService.getBundles(item, newBundleName);
Bundle targetBundle = null;
if (bundles.length < 1)
if (bundles.size() < 1)
{
// not found, create a new one
targetBundle = item.createBundle(newBundleName);
targetBundle = bundleService.create(context, item, newBundleName);
}
else
{
//verify bundle + name are not duplicates
for (Bundle b : bundles)
{
Bitstream[] bitstreams = b.getBitstreams();
List<Bitstream> bitstreams = b.getBitstreams();
for (Bitstream bsm : bitstreams)
{
if (bsm.getName().equals(ce.filename))
if (bsm.getName().equals(ce.filename))
{
throw new IllegalArgumentException("Duplicate bundle + filename cannot be added: "
+ b.getName() + " + " + bsm.getName());
@@ -166,39 +171,35 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
}
// select first bundle
targetBundle = bundles[0];
targetBundle = bundles.iterator().next();
}
bs = targetBundle.createBitstream(bis);
bs.setName(ce.filename);
bs = bitstreamService.create(context, targetBundle, bis);
bs.setName(context, ce.filename);
// Identify the format
// FIXME - guessing format guesses license.txt incorrectly as a text file format!
BitstreamFormat fmt = FormatIdentifier.guessFormat(context, bs);
bs.setFormat(fmt);
BitstreamFormat fmt = bitstreamFormatService.guessFormat(context, bs);
bitstreamService.setFormat(context, bs, fmt);
if (ce.description != null)
{
bs.setDescription(ce.description);
bs.setDescription(context, ce.description);
}
if ((ce.permissionsActionId != -1) && (ce.permissionsGroupName != null))
{
Group group = Group.findByName(context, ce.permissionsGroupName);
Group group = groupService.findByName(context, ce.permissionsGroupName);
if (group != null)
{
AuthorizeManager.removeAllPolicies(context, bs); // remove the default policy
ResourcePolicy rp = ResourcePolicy.create(context);
rp.setResource(bs);
rp.setAction(ce.permissionsActionId);
rp.setGroup(group);
rp.update();
authorizeService.removeAllPolicies(context, bs); // remove the default policy
authorizeService.createResourcePolicy(context, bs, group, null, ce.permissionsActionId, null);
}
}
//update after all changes are applied
bs.update();
bitstreamService.update(context, bs);
if (!suppressUndo)
{

View File

@@ -8,13 +8,17 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
/**
@@ -22,7 +26,10 @@ import org.dspace.core.Context;
*
*/
public class AddMetadataAction extends UpdateMetadataAction {
protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/**
* Adds metadata specified in the source archive
*
@@ -33,7 +40,8 @@ public class AddMetadataAction extends UpdateMetadataAction {
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, SQLException
{
Item item = itarch.getItem();
@@ -47,13 +55,13 @@ public class AddMetadataAction extends UpdateMetadataAction {
{
// match against metadata for this field/value in repository
// qualifier must be strictly matched, possibly null
Metadatum[] ardcv = null;
ardcv = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
List<MetadataValue> ardcv = null;
ardcv = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
boolean found = false;
for (Metadatum dcv : ardcv)
for (MetadataValue dcv : ardcv)
{
if (dcv.value.equals(dtom.value))
if (dcv.getValue().equals(dtom.value))
{
found = true;
break;
@@ -72,7 +80,7 @@ public class AddMetadataAction extends UpdateMetadataAction {
ItemUpdate.pr("Metadata to add: " + dtom.toString());
//validity tests that would occur in actual processing
// If we're just test the import, let's check that the actual metadata field exists.
MetadataSchema foundSchema = MetadataSchema.find(context, dtom.schema);
MetadataSchema foundSchema = metadataSchemaService.find(context, dtom.schema);
if (foundSchema == null)
{
@@ -81,8 +89,7 @@ public class AddMetadataAction extends UpdateMetadataAction {
}
else
{
int schemaID = foundSchema.getSchemaID();
MetadataField foundField = MetadataField.findByElement(context, schemaID, dtom.element, dtom.qualifier);
MetadataField foundField = metadataFieldService.findByElement(context, foundSchema, dtom.element, dtom.qualifier);
if (foundField == null)
{
@@ -93,7 +100,7 @@ public class AddMetadataAction extends UpdateMetadataAction {
}
else
{
item.addMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language, dtom.value);
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, dtom.value);
ItemUpdate.pr("Metadata added: " + dtom.toString());
if (!suppressUndo)
@@ -102,10 +109,12 @@ public class AddMetadataAction extends UpdateMetadataAction {
//ItemUpdate.pr("Undo metadata: " + dtom);
// add all as a replace record to be preceded by delete
for (Metadatum dcval : ardcv)
{
itarch.addUndoMetadataField(DtoMetadata.create(dcval.schema, dcval.element,
dcval.qualifier, dcval.language, dcval.value));
for (MetadataValue dcval : ardcv)
{
MetadataField metadataField = dcval.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itarch.addUndoMetadataField(DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcval.getLanguage(), dcval.getValue()));
}
}

View File

@@ -8,6 +8,7 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -33,7 +34,8 @@ public class BitstreamFilterByBundleName extends BitstreamFilter {
* @return whether bitstream is in bundle
*
*/
public boolean accept(Bitstream bitstream)
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException
{
if (bundleName == null)
@@ -47,10 +49,10 @@ public class BitstreamFilterByBundleName extends BitstreamFilter {
try
{
Bundle[] bundles = bitstream.getBundles();
List<Bundle> bundles = bitstream.getBundles();
for (Bundle b : bundles)
{
if (b.getName().equals(bundleName))
if (b.getName().equals(bundleName))
{
return true;
}

View File

@@ -17,8 +17,8 @@ import org.dspace.content.Bitstream;
*/
public class BitstreamFilterByFilename extends BitstreamFilter {
private Pattern pattern;
private String filenameRegex;
protected Pattern pattern;
protected String filenameRegex;
public BitstreamFilterByFilename()
{
@@ -31,7 +31,8 @@ public class BitstreamFilterByFilename extends BitstreamFilter {
*
* @return whether bitstream name matches the regular expression
*/
public boolean accept(Bitstream bitstream) throws BitstreamFilterException
@Override
public boolean accept(Bitstream bitstream) throws BitstreamFilterException
{
if (filenameRegex == null)
{

View File

@@ -41,7 +41,7 @@ public class ContentsEntry
final int permissionsActionId;
final String description;
private ContentsEntry(String filename,
protected ContentsEntry(String filename,
String bundlename,
int permissionsActionId,
String permissionsGroupName,

View File

@@ -14,10 +14,7 @@ import java.text.ParseException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.*;
import org.dspace.core.Context;
/**
@@ -43,7 +40,8 @@ public class DeleteBitstreamsAction extends UpdateBitstreamsAction
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws IllegalArgumentException, IOException,
SQLException, AuthorizeException, ParseException
{
@@ -54,34 +52,34 @@ public class DeleteBitstreamsAction extends UpdateBitstreamsAction
}
else
{
List<Integer> list = MetadataUtilities.readDeleteContentsFile(f);
List<String> list = MetadataUtilities.readDeleteContentsFile(f);
if (list.isEmpty())
{
ItemUpdate.pr("Warning: empty delete_contents file for item " + itarch.getDirectoryName() );
}
else
{
for (int id : list)
for (String id : list)
{
try
{
Bitstream bs = Bitstream.find(context, id);
Bitstream bs = bitstreamService.findByIdOrLegacyId(context, id);
if (bs == null)
{
ItemUpdate.pr("Bitstream not found by id: " + id);
}
else
{
Bundle[] bundles = bs.getBundles();
List<Bundle> bundles = bs.getBundles();
for (Bundle b : bundles)
{
if (isTest)
if (isTest)
{
ItemUpdate.pr("Delete bitstream with id = " + id);
}
else
{
b.removeBitstream(bs);
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted bitstream with id = " + id);
}
@@ -92,12 +90,12 @@ public class DeleteBitstreamsAction extends UpdateBitstreamsAction
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
String append = "Bitstream " + bs.getName() + " deleted on " + DCDate.getCurrent() + "; ";
Item item = bundles[0].getItems()[0];
Item item = bundles.iterator().next().getItems().iterator().next();
ItemUpdate.pr("Append provenance with: " + append);
if (!isTest)
{
MetadataUtilities.appendMetadata(item, dtom, false, append);
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
}

View File

@@ -14,10 +14,7 @@ import java.util.ArrayList;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.*;
import org.dspace.core.Context;
/**
@@ -33,7 +30,7 @@ import org.dspace.core.Context;
*/
public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
private BitstreamFilter filter;
protected BitstreamFilter filter;
/**
* Set filter
@@ -67,7 +64,8 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException,
BitstreamFilterException, IOException, ParseException, SQLException
{
@@ -75,16 +73,16 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
List<String> deleted = new ArrayList<String>();
Item item = itarch.getItem();
Bundle[] bundles = item.getBundles();
List<Bundle> bundles = item.getBundles();
for (Bundle b : bundles)
{
Bitstream[] bitstreams = b.getBitstreams();
List<Bitstream> bitstreams = b.getBitstreams();
String bundleName = b.getName();
for (Bitstream bs : bitstreams)
{
if (filter.accept(bs))
{
if (filter.accept(bs))
{
if (isTest)
{
@@ -98,7 +96,7 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
{
deleted.add(bs.getName());
}
b.removeBitstream(bs);
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted " + bundleName + " bitstream " + bs.getName()
+ " with id = " + bs.getID());
}
@@ -122,7 +120,7 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
if (!isTest)
{
MetadataUtilities.appendMetadata(item, dtom, false, sb.toString());
MetadataUtilities.appendMetadata(context, item, dtom, false, sb.toString());
}
}
}

View File

@@ -7,10 +7,14 @@
*/
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.core.Context;
@@ -31,17 +35,17 @@ public class DeleteMetadataAction extends UpdateMetadataAction {
* @throws ParseException
* @throws AuthorizeException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, ParseException
{
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, ParseException, SQLException {
Item item = itarch.getItem();
for (String f : targetFields)
{
DtoMetadata dummy = DtoMetadata.create(f, Item.ANY, "");
Metadatum[] ardcv = item.getMetadataByMetadataString(f);
List<MetadataValue> ardcv = itemService.getMetadataByMetadataString(item, f);
ItemUpdate.pr("Metadata to be deleted: ");
for (Metadatum dcv : ardcv)
for (MetadataValue dcv : ardcv)
{
ItemUpdate.pr(" " + MetadataUtilities.getDCValueString(dcv));
}
@@ -50,14 +54,16 @@ public class DeleteMetadataAction extends UpdateMetadataAction {
{
if (!suppressUndo)
{
for (Metadatum dcv : ardcv)
for (MetadataValue dcv : ardcv)
{
itarch.addUndoMetadataField(DtoMetadata.create(dcv.schema, dcv.element,
dcv.qualifier, dcv.language, dcv.value));
MetadataField metadataField = dcv.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itarch.addUndoMetadataField(DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcv.getLanguage(), dcv.getValue()));
}
}
item.clearMetadata(dummy.schema, dummy.element, dummy.qualifier, Item.ANY);
itemService.clearMetadata(context, item, dummy.schema, dummy.element, dummy.qualifier, Item.ANY);
}
}
}

View File

@@ -30,7 +30,7 @@ class DtoMetadata
final String language;
final String value;
private DtoMetadata(String schema, String element, String qualifier, String language, String value)
protected DtoMetadata(String schema, String element, String qualifier, String language, String value)
{
this.schema = schema;
this.element = element;

View File

@@ -19,7 +19,9 @@ import java.io.OutputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@@ -30,13 +32,15 @@ import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerConfigurationException;
import org.apache.log4j.Logger;
import org.dspace.content.ItemIterator;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.w3c.dom.Document;
@@ -48,24 +52,28 @@ public class ItemArchive {
private static final Logger log = Logger.getLogger(ItemArchive.class);
public static final String DUBLIN_CORE_XML = "dublin_core.xml";
private static DocumentBuilder builder = null;
private static Transformer transformer = null;
private List<DtoMetadata> dtomList = null;
private List<DtoMetadata> undoDtomList = new ArrayList<DtoMetadata>();
private List<Integer> undoAddContents = new ArrayList<Integer>(); // for undo of add
private Item item;
private File dir; // directory name in source archive for this item
private String dirname; //convenience
protected static DocumentBuilder builder = null;
protected Transformer transformer = null;
protected List<DtoMetadata> dtomList = null;
protected List<DtoMetadata> undoDtomList = new ArrayList<DtoMetadata>();
protected List<UUID> undoAddContents = new ArrayList<>(); // for undo of add
protected Item item;
protected File dir; // directory name in source archive for this item
protected String dirname; //convenience
protected HandleService handleService;
protected ItemService itemService;
//constructors
private ItemArchive()
protected ItemArchive()
{
// nothing
}
handleService = HandleServiceFactory.getInstance().getHandleService();
itemService = ContentServiceFactory.getInstance().getItemService();
}
/** factory method
*
@@ -119,7 +127,7 @@ public class ItemArchive {
return itarch;
}
private static DocumentBuilder getDocumentBuilder()
protected static DocumentBuilder getDocumentBuilder()
throws ParserConfigurationException
{
if (builder == null)
@@ -128,8 +136,8 @@ public class ItemArchive {
}
return builder;
}
private static Transformer getTransformer()
protected Transformer getTransformer()
throws TransformerConfigurationException
{
if (transformer == null)
@@ -188,7 +196,7 @@ public class ItemArchive {
* Add bitstream id to delete contents file
* @param bitstreamId
*/
public void addUndoDeleteContents(int bitstreamId)
public void addUndoDeleteContents(UUID bitstreamId)
{
this.undoAddContents.add(bitstreamId);
}
@@ -222,7 +230,7 @@ public class ItemArchive {
String handle = uri.substring(ItemUpdate.HANDLE_PREFIX.length());
DSpaceObject dso = HandleManager.resolveToObject(context, handle);
DSpaceObject dso = handleService.resolveToObject(context, handle);
if (dso instanceof Item)
{
item = (Item) dso;
@@ -260,16 +268,14 @@ public class ItemArchive {
this.addUndoMetadataField(dtom); //seed the undo list with the identifier field
ItemIterator itr = Item.findByMetadataField(context, dtom.schema, dtom.element, dtom.qualifier, dtom.value);
Iterator<Item> itr = itemService.findByMetadataField(context, dtom.schema, dtom.element, dtom.qualifier, dtom.value);
int count = 0;
while (itr.hasNext())
{
item = itr.next();
count++;
}
itr.close();
ItemUpdate.prv("items matching = " + count );
if (count != 1)
@@ -325,7 +331,7 @@ public class ItemArchive {
{
File f = new File(dir, ItemUpdate.DELETE_CONTENTS_FILE);
pw = new PrintWriter(new BufferedWriter(new FileWriter(f)));
for (Integer i : undoAddContents)
for (UUID i : undoAddContents)
{
pw.println(i);
}

View File

@@ -14,12 +14,7 @@ import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -28,9 +23,13 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
/**
*
@@ -73,7 +72,10 @@ public class ItemUpdate {
public static final Map<String, String> filterAliases = new HashMap<String, String>();
public static boolean verbose = false;
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static
{
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
@@ -85,7 +87,8 @@ public class ItemUpdate {
// File listing filter to check for folders
static FilenameFilter directoryFilter = new FilenameFilter()
{
public boolean accept(File dir, String n)
@Override
public boolean accept(File dir, String n)
{
File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
return f.isDirectory();
@@ -95,7 +98,8 @@ public class ItemUpdate {
// File listing filter to check for files (not directories)
static FilenameFilter fileFilter = new FilenameFilter()
{
public boolean accept(File dir, String n)
@Override
public boolean accept(File dir, String n)
{
File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
return (f.isFile());
@@ -103,9 +107,9 @@ public class ItemUpdate {
};
// instance variables
private ActionManager actionMgr = new ActionManager();
private List<String> undoActionList = new ArrayList<String>();
private String eperson;
protected ActionManager actionMgr = new ActionManager();
protected List<String> undoActionList = new ArrayList<String>();
protected String eperson;
/**
*
@@ -387,8 +391,8 @@ public class ItemUpdate {
}
System.exit(status);
}
private void processArchive(Context context, String sourceDirPath, String itemField,
protected void processArchive(Context context, String sourceDirPath, String itemField,
String metadataIndexName, boolean alterProvenance, boolean isTest)
throws Exception
{
@@ -444,9 +448,7 @@ public class ItemUpdate {
if (!isTest)
{
Item item = itarch.getItem();
item.update(); //need to update before commit
context.commit();
item.decache();
itemService.update(context, item); //need to update before commit
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;
@@ -512,7 +514,7 @@ public class ItemUpdate {
* @throws FileNotFoundException
* @throws IOException
*/
private File initUndoArchive(File sourceDir)
protected File initUndoArchive(File sourceDir)
throws FileNotFoundException, IOException
{
File parentDir = sourceDir.getCanonicalFile().getParentFile();
@@ -553,7 +555,7 @@ public class ItemUpdate {
//private void write
private void setEPerson(Context context, String eperson)
protected void setEPerson(Context context, String eperson)
throws Exception
{
if (eperson == null)
@@ -567,11 +569,11 @@ public class ItemUpdate {
if (eperson.indexOf('@') != -1)
{
// @ sign, must be an email
myEPerson = EPerson.findByEmail(context, eperson);
myEPerson = epersonService.findByEmail(context, eperson);
}
else
{
myEPerson = EPerson.find(context, Integer.parseInt(eperson));
myEPerson = epersonService.find(context, UUID.fromString(eperson));
}
if (myEPerson == null)

View File

@@ -32,6 +32,10 @@ import javax.xml.transform.stream.StreamResult;
import org.apache.commons.lang.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
@@ -40,9 +44,6 @@ import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchema;
import org.dspace.core.ConfigurationManager;
@@ -56,7 +57,9 @@ import org.dspace.core.ConfigurationManager;
*
*/
public class MetadataUtilities {
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
/**
*
* Working around Item API to delete a value-specific Metadatum
@@ -72,32 +75,31 @@ public class MetadataUtilities {
*
* @return true if metadata field is found with matching value and was deleted
*/
public static boolean deleteMetadataByValue(Item item, DtoMetadata dtom, boolean isLanguageStrict)
{
Metadatum[] ar = null;
public static boolean deleteMetadataByValue(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict) throws SQLException {
List<MetadataValue> ar = null;
if (isLanguageStrict)
{ // get all for given type
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
boolean found = false;
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (Metadatum dcv : ar)
for (MetadataValue dcv : ar)
{
if (dcv.value.equals(dtom.value))
if (dcv.getValue().equals(dtom.value))
{
found = true;
}
else
{
vals.add(dcv.value);
vals.add(dcv.getValue());
}
}
@@ -105,14 +107,14 @@ public class MetadataUtilities {
{
if (isLanguageStrict)
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
item.addMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals.toArray(new String[vals.size()]));
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
}
return found;
}
@@ -126,57 +128,56 @@ public class MetadataUtilities {
* @param textToAppend
* @throws IllegalArgumentException - When target metadata field is not found
*/
public static void appendMetadata(Item item, DtoMetadata dtom, boolean isLanguageStrict,
public static void appendMetadata(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict,
String textToAppend)
throws IllegalArgumentException
{
Metadatum[] ar = null;
throws IllegalArgumentException, SQLException {
List<MetadataValue> ar = null;
// get all values for given element/qualifier
if (isLanguageStrict) // get all for given element/qualifier
{
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
if (ar.length == 0)
if (ar.size() == 0)
{
throw new IllegalArgumentException("Metadata to append to not found");
}
int idx = 0; //index of field to change
if (ar.length > 1) //need to pick one, can't be sure it's the last one
if (ar.size() > 1) //need to pick one, can't be sure it's the last one
{
// TODO maybe get highest id ?
}
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (int i=0; i < ar.length; i++)
for (int i=0; i < ar.size(); i++)
{
if (i == idx)
{
vals.add(ar[i].value + textToAppend);
vals.add(ar.get(i).getValue() + textToAppend);
}
else
{
vals.add(ar[i].value);
vals.add(ar.get(i).getValue());
}
}
if (isLanguageStrict)
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
item.addMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals.toArray(new String[vals.size()]));
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
}
/**
@@ -424,10 +425,10 @@ public class MetadataUtilities {
* @throws FileNotFoundException
* @throws IOException
*/
public static List<Integer> readDeleteContentsFile(File f)
public static List<String> readDeleteContentsFile(File f)
throws FileNotFoundException, IOException
{
List<Integer> list = new ArrayList<Integer>();
List<String> list = new ArrayList<>();
BufferedReader in = null;
@@ -444,16 +445,7 @@ public class MetadataUtilities {
continue;
}
int n = 0;
try
{
n = Integer.parseInt(line);
list.add(n);
}
catch(NumberFormatException e)
{
ItemUpdate.pr("Error reading delete contents line:" + e.toString());
}
list.add(line);
}
}
finally
@@ -477,10 +469,12 @@ public class MetadataUtilities {
* @param dcv
* @return string displaying elements of the Metadatum
*/
public static String getDCValueString(Metadatum dcv)
public static String getDCValueString(MetadataValue dcv)
{
return "schema: " + dcv.schema + "; element: " + dcv.element + "; qualifier: " + dcv.qualifier +
"; language: " + dcv.language + "; value: " + dcv.value;
MetadataField metadataField = dcv.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
return "schema: " + metadataSchema.getName() + "; element: " + metadataField.getElement() + "; qualifier: " + metadataField.getQualifier() +
"; language: " + dcv.getLanguage() + "; value: " + dcv.getValue();
}
/**

View File

@@ -8,6 +8,7 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -31,15 +32,16 @@ public class OriginalBitstreamFilter extends BitstreamFilterByBundleName
*
* @throws BitstreamFilterException
*/
public boolean accept(Bitstream bitstream)
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException
{
try
{
Bundle[] bundles = bitstream.getBundles();
for (Bundle b : bundles)
List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles)
{
if (b.getName().equals("ORIGINAL"))
if (bundle.getName().equals("ORIGINAL"))
{
return true;
}

View File

@@ -8,6 +8,8 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -18,7 +20,7 @@ import org.dspace.content.Bundle;
*/
public class OriginalWithDerivativesBitstreamFilter extends BitstreamFilter
{
private String[] bundlesToEmpty = { "ORIGINAL", "TEXT", "THUMBNAIL" };
protected String[] bundlesToEmpty = { "ORIGINAL", "TEXT", "THUMBNAIL" };
public OriginalWithDerivativesBitstreamFilter()
{
@@ -32,15 +34,16 @@ public class OriginalWithDerivativesBitstreamFilter extends BitstreamFilter
* @throws BitstreamFilterException
* @return true if bitstream is in specified bundles
*/
public boolean accept(Bitstream bitstream)
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException
{
try
{
Bundle[] bundles = bitstream.getBundles();
List<Bundle> bundles = bitstream.getBundles();
for (Bundle b : bundles)
{
for (String bn : bundlesToEmpty)
for (String bn : bundlesToEmpty)
{
if (b.getName().equals(bn))
{

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.itemupdate;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
/**
@@ -15,6 +17,9 @@ import org.dspace.core.Context;
*/
public interface UpdateAction
{
public ItemService itemService = ContentServiceFactory.getInstance().getItemService();
/**
* Action to update item
*

View File

@@ -7,6 +7,10 @@
*/
package org.dspace.app.itemupdate;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
/**
* Base class for Bitstream actions
*
@@ -16,6 +20,10 @@ public abstract class UpdateBitstreamsAction implements UpdateAction {
protected boolean alterProvenance = true;
protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService();
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
/**
* Set variable to indicate that the dc.description.provenance field may
* be changed as a result of Bitstream changes by ItemUpdate

View File

@@ -11,6 +11,7 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.TreeMap;
import org.dspace.core.ConfigurationManager;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
@@ -275,9 +276,21 @@ public class ScriptLauncher
*/
private static void display()
{
// List all command elements
List<Element> commands = commandConfigs.getRootElement().getChildren("command");
System.out.println("Usage: dspace [command-name] {parameters}");
// Sort the commands by name.
// We cannot just use commands.sort() because it tries to remove and
// reinsert Elements within other Elements, and that doesn't work.
TreeMap<String, Element> sortedCommands = new TreeMap<>();
for (Element command : commands)
{
sortedCommands.put(command.getChild("name").getValue(), command);
}
// Display the sorted list
System.out.println("Usage: dspace [command-name] {parameters}");
for (Element command : sortedCommands.values())
{
System.out.println(" - " + command.getChild("name").getValue() +
": " + command.getChild("description").getValue());

View File

@@ -19,6 +19,7 @@ import java.io.InputStream;
import javax.imageio.ImageIO;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
/**
@@ -30,6 +31,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class BrandedPreviewJPEGFilter extends MediaFilter
{
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".preview.jpg";
@@ -39,6 +41,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "BRANDED_PREVIEW";
@@ -47,6 +50,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
@@ -55,6 +59,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Generated Branded Preview";
@@ -67,7 +72,8 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// read in bitstream's image
@@ -92,7 +98,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
// if verbose flag is set, print out dimensions
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("original size: " + xsize + "," + ysize);
}
@@ -105,7 +111,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("x scale factor: " + scaleFactor);
}
@@ -117,7 +123,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("new size: " + xsize + "," + ysize);
}
@@ -135,7 +141,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
}
// if verbose flag is set, print details to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("created thumbnail size: " + xsize + ", "
+ ysize);
@@ -170,7 +176,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
Brand brand = new Brand((int) xsize, brandHeight, new Font(brandFont, Font.PLAIN, brandFontPoint), 5);
BufferedImage brandImage = brand.create(ConfigurationManager.getProperty("webui.preview.brand"),
ConfigurationManager.getProperty("webui.preview.brand.abbrev"),
MediaFilterManager.getCurrentItem() == null ? "" : "hdl:" + MediaFilterManager.getCurrentItem().getHandle());
currentItem == null ? "" : "hdl:" + currentItem.getHandle());
g2d.drawImage(brandImage, (int)0, (int)ysize, (int) xsize, (int) 20, null);

View File

@@ -0,0 +1,114 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFExcelExtractor;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
/*
* ExcelFilter
*
* Entries you must add to dspace.cfg:
*
* filter.plugins = blah, \
* Excel Text Extractor
*
* plugin.named.org.dspace.app.mediafilter.FormatFilter = \
* blah = blah, \
* org.dspace.app.mediafilter.ExcelFilter = Excel Text Extractor
*
* #Configure each filter's input Formats
* filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML
*
*/
public class ExcelFilter extends MediaFilter
{
    /** Logger for extraction failures; final since it is never reassigned. */
    private static final Logger log = Logger.getLogger(ExcelFilter.class);

    /**
     * @param oldFilename name of the source bitstream
     * @return name for the generated text bitstream (source name + ".txt")
     */
    public String getFilteredName(String oldFilename)
    {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name the extracted text is stored in
     */
    public String getBundleName()
    {
        return "TEXT";
    }

    /**
     * @return String bitstream format of the generated bitstream
     */
    public String getFormatString()
    {
        return "Text";
    }

    /**
     * @return String description attached to the generated bitstream
     */
    public String getDescription()
    {
        return "Extracted text";
    }

    /**
     * Extract the plain text of a Microsoft Excel workbook.
     *
     * @param item    the item owning the bitstream (not used by this filter)
     * @param source  source input stream (.xls or .xlsx content)
     * @param verbose verbose flag (not used by this filter)
     * @return InputStream of the extracted text (UTF-8), or {@code null} if
     *         the content was not recognized as an Excel workbook
     * @throws Exception if POI fails to parse or extract the workbook
     */
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
            throws Exception
    {
        String extractedText = null;

        try
        {
            POITextExtractor theExtractor = ExtractorFactory.createExtractor(source);

            // Only Excel workbooks are handled: ExcelExtractor covers .xls,
            // XSSFExcelExtractor covers .xlsx. The two original branches ran
            // identical code, so they are merged into a single condition.
            if (theExtractor instanceof ExcelExtractor
                    || theExtractor instanceof XSSFExcelExtractor)
            {
                extractedText = theExtractor.getText();
            }
        }
        catch (Exception e)
        {
            log.error("Error filtering bitstream: " + e.getMessage(), e);
            throw e;
        }

        if (extractedText != null)
        {
            // generate an input stream with the extracted text
            return IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8);
        }

        return null;
    }
}

View File

@@ -55,7 +55,7 @@ public interface FormatFilter
*
* @return result of filter's transformation, written out to a bitstream
*/
public InputStream getDestinationStream(InputStream source)
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
throws Exception;
/**
@@ -77,7 +77,7 @@ public interface FormatFilter
* @return true if bitstream processing should continue,
* false if this bitstream should be skipped
*/
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception;
/**

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
@@ -22,6 +24,7 @@ import javax.swing.text.html.HTMLEditorKit;
public class HTMLFilter extends MediaFilter
{
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -31,6 +34,7 @@ public class HTMLFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -39,6 +43,7 @@ public class HTMLFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "Text";
@@ -47,6 +52,7 @@ public class HTMLFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -58,7 +64,8 @@ public class HTMLFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// try and read the document - set to ignore character set directive,

View File

@@ -7,9 +7,12 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
/**
@@ -26,13 +29,29 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
File f2 = getThumbnailFile(f);
return new FileInputStream(f2);
}
File f2 = null;
try
{
f2 = getThumbnailFile(f, verbose);
byte[] bytes = Files.readAllBytes(f2.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
}
}
}

View File

@@ -7,21 +7,47 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
File f2 = getImageFile(f, 0);
File f3 = getThumbnailFile(f2);
return new FileInputStream(f3);
File f2 = null;
File f3 = null;
try
{
f2 = getImageFile(f, 0, verbose);
f3 = getThumbnailFile(f2, verbose);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
if (f3 != null)
{
//noinspection ResultOfMethodCallIgnored
f3.delete();
}
}
}
public static final String[] PDF = {"Adobe PDF"};
@Override
public String[] getInputMIMETypes()
{
return PDF;

View File

@@ -16,11 +16,11 @@ import java.util.regex.PatternSyntaxException;
import javax.imageio.ImageIO;
import org.dspace.app.mediafilter.MediaFilter;
import org.dspace.app.mediafilter.SelfRegisterInputFormats;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.im4java.core.ConvertCmd;
import org.im4java.core.IM4JavaException;
@@ -36,11 +36,13 @@ import org.dspace.core.ConfigurationManager;
*/
public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
{
private static int width = 180;
private static int height = 120;
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static {
String pre = ImageMagickThumbnailFilter.class.getName();
@@ -48,6 +50,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
if (description != null) {
bitstreamDescription = description;
@@ -58,13 +61,13 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
} catch(PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
@@ -74,6 +77,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "THUMBNAIL";
@@ -82,6 +86,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
@@ -90,12 +95,13 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription()
{
return bitstreamDescription;
}
public static File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
@@ -110,7 +116,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
return f;
}
public static File getThumbnailFile(File f) throws IOException, InterruptedException, IM4JavaException {
public File getThumbnailFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
@@ -118,35 +124,40 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println("IM Thumbnail Param: "+op);
}
cmd.run(op);
return f2;
}
public static File getImageFile(File f, int page) throws IOException, InterruptedException, IM4JavaException {
public File getImageFile(File f, int page, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath()+s);
if (flatten)
{
op.flatten();
}
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println("IM Image Param: "+op);
}
cmd.run(op);
return f2;
}
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception
{
String nsrc = source.getName();
for(Bundle b: item.getBundles("THUMBNAIL")) {
for(Bundle b: itemService.getBundles(item, "THUMBNAIL")) {
for(Bitstream bit: b.getBitstreams()) {
String n = bit.getName();
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc)) continue;
@@ -156,13 +167,13 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
//If anything other than a generated thumbnail is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
}
continue;
@@ -177,16 +188,19 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
return true; //assume that the thumbnail is a custom one
}
@Override
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
@Override
public String[] getInputDescriptions()
{
return null;
}
@Override
public String[] getInputExtensions()
{
return ImageIO.getReaderFileSuffixes();

View File

@@ -18,6 +18,7 @@ import java.io.InputStream;
import javax.imageio.ImageIO;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
/**
@@ -29,6 +30,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
{
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
@@ -38,6 +40,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "THUMBNAIL";
@@ -46,6 +49,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
@@ -54,6 +58,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Generated Thumbnail";
@@ -65,7 +70,8 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// read in bitstream's image
@@ -87,7 +93,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// if verbose flag is set, print out dimensions
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("original size: " + xsize + "," + ysize);
}
@@ -100,7 +106,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("x scale factor: " + scale_factor);
}
@@ -112,7 +118,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("new size: " + xsize + "," + ysize);
}
@@ -130,7 +136,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
}
// if verbose flag is set, print details to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("created thumbnail size: " + xsize + ", "
+ ysize);
@@ -173,16 +179,19 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
}
@Override
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
@Override
public String[] getInputDescriptions()
{
return null;
}
@Override
public String[] getInputExtensions()
{
// Temporarily disabled as JDK 1.6 only

View File

@@ -39,7 +39,8 @@ public abstract class MediaFilter implements FormatFilter
* @return true if bitstream processing should continue,
* false if this bitstream should be skipped
*/
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception
{
return true; //default to no pre-processing
@@ -62,6 +63,7 @@ public abstract class MediaFilter implements FormatFilter
* the bitstream which was generated by
* this filter.
*/
@Override
public void postProcessBitstream(Context c, Item item, Bitstream generatedBitstream)
throws Exception
{

View File

@@ -0,0 +1,332 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import org.apache.commons.cli.*;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.*;
import org.dspace.handle.factory.HandleServiceFactory;
import java.util.*;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
* MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all
* bitstreams to be processed, even if they have been before; -n noindex does not
* recreate index after processing bitstreams; -i [identifier] limits processing
* scope to a community, collection or item; and -m [max] limits processing to a
* maximum number of items.
*/
public class MediaFilterCLITool {

    /** Key (in dspace.cfg) which lists all enabled filters by name. */
    private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";

    /** Prefix (in dspace.cfg) for all filter properties. */
    private static final String FILTER_PREFIX = "filter";

    /** Suffix (in dspace.cfg) for input formats supported by each filter. */
    private static final String INPUT_FORMATS_SUFFIX = "inputFormats";

    /**
     * Command-line entry point that invokes the media/format filters over the
     * repository's content. Flags: -v verbose output; -q quiet; -f force all
     * bitstreams to be processed even if they have been before; -i [identifier]
     * limits scope to a community, collection or item; -m [max] limits
     * processing to a maximum number of items; -p restricts the set of filter
     * plugins; -s skips the listed identifiers.
     *
     * @param argv command-line arguments as described above
     * @throws Exception on unrecoverable startup errors (e.g. unparsable
     *         command line beyond a missing argument)
     */
    public static void main(String[] argv) throws Exception
    {
        // set headless for non-gui workstations
        System.setProperty("java.awt.headless", "true");

        // create an options object and populate it
        CommandLineParser parser = new PosixParser();

        int status = 0;

        Options options = new Options();

        options.addOption("v", "verbose", false,
                "print all extracted text and other details to STDOUT");
        options.addOption("q", "quiet", false,
                "do not print anything except in the event of errors.");
        options.addOption("f", "force", false,
                "force all bitstreams to be processed");
        options.addOption("i", "identifier", true,
                "ONLY process bitstreams belonging to identifier");
        options.addOption("m", "maximum", true,
                "process no more than maximum items");
        options.addOption("h", "help", false, "help");

        // create a "plugin" option (to specify specific MediaFilter plugins to run)
        // NOTE: OptionBuilder accumulates state statically; each with* call
        // configures the Option produced by the following create().
        OptionBuilder.withLongOpt("plugins");
        OptionBuilder.withValueSeparator(',');
        OptionBuilder.withDescription(
                "ONLY run the specified Media Filter plugin(s)\n" +
                        "listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
                        "Separate multiple with a comma (,)\n" +
                        "(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
        Option pluginOption = OptionBuilder.create('p');
        pluginOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
        options.addOption(pluginOption);

        // create a "skip" option (to specify communities/collections/items to skip)
        OptionBuilder.withLongOpt("skip");
        OptionBuilder.withValueSeparator(',');
        OptionBuilder.withDescription(
                "SKIP the bitstreams belonging to identifier\n" +
                        "Separate multiple identifiers with a comma (,)\n" +
                        "(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
        Option skipOption = OptionBuilder.create('s');
        skipOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
        options.addOption(skipOption);

        boolean isVerbose = false;
        boolean isQuiet = false;
        boolean isForce = false; // default to not forced
        String identifier = null; // object scope limiter
        int max2Process = Integer.MAX_VALUE;
        Map<String, List<String>> filterFormats = new HashMap<>();

        CommandLine line = null;
        try
        {
            line = parser.parse(options, argv);
        }
        catch(MissingArgumentException e)
        {
            System.out.println("ERROR: " + e.getMessage());
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("MediaFilterManager\n", options);
            System.exit(1);
        }

        if (line.hasOption('h'))
        {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("MediaFilterManager\n", options);
            System.exit(0);
        }

        if (line.hasOption('v'))
        {
            isVerbose = true;
        }

        isQuiet = line.hasOption('q');

        if (line.hasOption('f'))
        {
            isForce = true;
        }

        if (line.hasOption('i'))
        {
            identifier = line.getOptionValue('i');
        }

        if (line.hasOption('m'))
        {
            max2Process = Integer.parseInt(line.getOptionValue('m'));
            // NOTE(review): this treats "-m 1" as invalid as well; original
            // behavior kept, but "< 1" may have been the intent — confirm.
            if (max2Process <= 1)
            {
                System.out.println("Invalid maximum value '" +
                        line.getOptionValue('m') + "' - ignoring");
                max2Process = Integer.MAX_VALUE;
            }
        }

        String[] filterNames = null;
        if(line.hasOption('p'))
        {
            //specified which media filter plugins we are using
            filterNames = line.getOptionValues('p');

            if(filterNames==null || filterNames.length==0)
            {   //display error, since no plugins specified
                System.err.println("\nERROR: -p (-plugin) option requires at least one plugin to be specified.\n" +
                        "(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n");
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("MediaFilterManager\n", options);
                System.exit(1);
            }
        }
        else
        {
            //retrieve list of all enabled media filter plugins!
            String enabledPlugins = ConfigurationManager.getProperty(MEDIA_FILTER_PLUGINS_KEY);
            filterNames = enabledPlugins.split(",\\s*");
        }

        MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService();
        mediaFilterService.setForce(isForce);
        mediaFilterService.setQuiet(isQuiet);
        mediaFilterService.setVerbose(isVerbose);
        mediaFilterService.setMax2Process(max2Process);

        //initialize an array of our enabled filters
        List<FormatFilter> filterList = new ArrayList<>();

        //set up each filter
        for(int i=0; i< filterNames.length; i++)
        {
            //get filter of this name & add to list of filters
            FormatFilter filter = (FormatFilter) PluginManager.getNamedPlugin(FormatFilter.class, filterNames[i]);
            if(filter==null)
            {
                System.err.println("\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" + filterNames[i] + "'");
                System.exit(1);
            }
            else
            {
                filterList.add(filter);

                String filterClassName = filter.getClass().getName();

                String pluginName = null;

                //If this filter is a SelfNamedPlugin,
                //then the input formats it accepts may differ for
                //each "named" plugin that it defines.
                //So, we have to look for every key that fits the
                //following format: filter.<class-name>.<plugin-name>.inputFormats
                if( SelfNamedPlugin.class.isAssignableFrom(filter.getClass()) )
                {
                    //Get the plugin instance name for this class
                    pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName();
                }

                //Retrieve our list of supported formats from dspace.cfg
                //For SelfNamedPlugins, format of key is:
                //  filter.<class-name>.<plugin-name>.inputFormats
                //For other MediaFilters, format of key is:
                //  filter.<class-name>.inputFormats
                String formats = ConfigurationManager.getProperty(
                        FILTER_PREFIX + "." + filterClassName +
                                (pluginName!=null ? "." + pluginName : "") +
                                "." + INPUT_FORMATS_SUFFIX);

                //add to internal map of filters to supported formats
                if (formats != null)
                {
                    //For SelfNamedPlugins, map key is:
                    //  <class-name><separator><plugin-name>
                    //For other MediaFilters, map key is just:
                    //  <class-name>
                    filterFormats.put(filterClassName +
                                    (pluginName!=null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + pluginName : ""),
                            Arrays.asList(formats.split(",[\\s]*")));
                }
            }//end if filter!=null
        }//end for

        //If verbose, print out loaded mediafilter info
        if(isVerbose)
        {
            System.out.println("The following MediaFilters are enabled: ");
            Iterator<String> i = filterFormats.keySet().iterator();
            while(i.hasNext())
            {
                String filterName = i.next();
                System.out.println("Full Filter Name: " + filterName);
                String pluginName = null;
                if(filterName.contains(MediaFilterService.FILTER_PLUGIN_SEPARATOR))
                {
                    String[] fields = filterName.split(MediaFilterService.FILTER_PLUGIN_SEPARATOR);
                    filterName=fields[0];
                    pluginName=fields[1];
                }

                System.out.println(filterName +
                        (pluginName!=null? " (Plugin: " + pluginName + ")": ""));
            }
        }

        mediaFilterService.setFilterFormats(filterFormats);
        //store our filter list into an internal array
        mediaFilterService.setFilterClasses(filterList);

        //Retrieve list of identifiers to skip (if any)
        String[] skipIds = null;
        if(line.hasOption('s'))
        {
            //specified which identifiers to skip when processing
            skipIds = line.getOptionValues('s');

            if(skipIds==null || skipIds.length==0)
            {   //display error, since no identifiers specified to skip
                System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" +
                        "Make sure to separate multiple identifiers with a comma!\n" +
                        "(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n");
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("MediaFilterManager\n", options);
                System.exit(0);
            }

            //save to a global skip list
            mediaFilterService.setSkipList(Arrays.asList(skipIds));
        }

        Context c = null;
        try
        {
            c = new Context();

            // have to be super-user to do the filtering
            c.turnOffAuthorisationSystem();

            // now apply the filters
            if (identifier == null)
            {
                mediaFilterService.applyFiltersAllItems(c);
            }
            else  // restrict application scope to identifier
            {
                DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, identifier);
                if (dso == null)
                {
                    throw new IllegalArgumentException("Cannot resolve "
                            + identifier + " to a DSpace object");
                }

                switch (dso.getType())
                {
                    case Constants.COMMUNITY:
                        mediaFilterService.applyFiltersCommunity(c, (Community) dso);
                        break;
                    case Constants.COLLECTION:
                        mediaFilterService.applyFiltersCollection(c, (Collection) dso);
                        break;
                    case Constants.ITEM:
                        mediaFilterService.applyFiltersItem(c, (Item) dso);
                        break;
                }
            }

            c.complete();
            c = null;
        }
        catch (Exception e)
        {
            // Report the failure before exiting non-zero. Previously the
            // exception was silently swallowed and the only indication of a
            // problem was the process exit status.
            System.err.println("ERROR applying media filters: " + e.getMessage());
            e.printStackTrace(System.err);
            status = 1;
        }
        finally
        {
            if (c != null)
            {
                c.abort();
            }
        }
        System.exit(status);
    }
}

View File

@@ -1,850 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCDate;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.eperson.Group;
import org.dspace.handle.HandleManager;
import org.dspace.search.DSIndexer;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
* MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all
* bitstreams to be processed, even if they have been before; -n noindex does not
* recreate index after processing bitstreams; -i [identifier] limits processing
* scope to a community, collection or item; and -m [max] limits processing to a
* maximum number of items.
*/
public class MediaFilterManager
{
private static final Logger log = Logger.getLogger(MediaFilterManager.class);
//key (in dspace.cfg) which lists all enabled filters by name
public static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";
//prefix (in dspace.cfg) for all filter properties
public static final String FILTER_PREFIX = "filter";
//suffix (in dspace.cfg) for input formats supported by each filter
public static final String INPUT_FORMATS_SUFFIX = "inputFormats";
static boolean isVerbose = false; // default to not verbose
static boolean isQuiet = false; // default is noisy
static boolean isForce = false; // default to not forced
static String identifier = null; // object scope limiter
static int max2Process = Integer.MAX_VALUE; // maximum number items to process
static int processed = 0; // number items processed
private static Item currentItem = null; // current item being processed
private static FormatFilter[] filterClasses = null;
private static final Map<String, List<String>> filterFormats = new HashMap<>();
private static List<String> skipList = null; //list of identifiers to skip during processing
private static final List<String> publicFiltersClasses = new ArrayList<>();
//separator in filterFormats Map between a filter class name and a plugin name,
//for MediaFilters which extend SelfNamedPlugin (\034 is "file separator" char)
public static final String FILTER_PLUGIN_SEPARATOR = "\034";
static {
String publicPermissionFilters = ConfigurationManager.getProperty("filter.org.dspace.app.mediafilter.publicPermission");
if(publicPermissionFilters != null) {
String[] publicPermisionFiltersArray = publicPermissionFilters.split(",");
for(String filter : publicPermisionFiltersArray) {
publicFiltersClasses.add(filter.trim());
}
}
}
public static void main(String[] argv) throws Exception
{
// set headless for non-gui workstations
System.setProperty("java.awt.headless", "true");
// create an options object and populate it
CommandLineParser parser = new PosixParser();
int status = 0;
Options options = new Options();
options.addOption("v", "verbose", false,
"print all extracted text and other details to STDOUT");
options.addOption("q", "quiet", false,
"do not print anything except in the event of errors.");
options.addOption("f", "force", false,
"force all bitstreams to be processed");
options.addOption("i", "identifier", true,
"ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true,
"process no more than maximum items");
options.addOption("h", "help", false, "help");
//create a "plugin" option (to specify specific MediaFilter plugins to run)
OptionBuilder.withLongOpt("plugins");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
Option pluginOption = OptionBuilder.create('p');
pluginOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
options.addOption(pluginOption);
//create a "skip" option (to specify communities/collections/items to skip)
OptionBuilder.withLongOpt("skip");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
Option skipOption = OptionBuilder.create('s');
skipOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
options.addOption(skipOption);
CommandLine line = null;
try
{
line = parser.parse(options, argv);
}
catch(MissingArgumentException e)
{
System.out.println("ERROR: " + e.getMessage());
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
}
if (line.hasOption('v'))
{
isVerbose = true;
}
isQuiet = line.hasOption('q');
if (line.hasOption('f'))
{
isForce = true;
}
if (line.hasOption('i'))
{
identifier = line.getOptionValue('i');
}
if (line.hasOption('m'))
{
max2Process = Integer.parseInt(line.getOptionValue('m'));
if (max2Process <= 1)
{
System.out.println("Invalid maximum value '" +
line.getOptionValue('m') + "' - ignoring");
max2Process = Integer.MAX_VALUE;
}
}
String filterNames[] = null;
if(line.hasOption('p'))
{
//specified which media filter plugins we are using
filterNames = line.getOptionValues('p');
if(filterNames==null || filterNames.length==0)
{ //display error, since no plugins specified
System.err.println("\nERROR: -p (-plugin) option requires at least one plugin to be specified.\n" +
"(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
}
else
{
//retrieve list of all enabled media filter plugins!
String enabledPlugins = ConfigurationManager.getProperty(MEDIA_FILTER_PLUGINS_KEY);
filterNames = enabledPlugins.split(",\\s*");
}
//initialize an array of our enabled filters
List<FormatFilter> filterList = new ArrayList<FormatFilter>();
//set up each filter
for(int i=0; i< filterNames.length; i++)
{
//get filter of this name & add to list of filters
FormatFilter filter = (FormatFilter) PluginManager.getNamedPlugin(FormatFilter.class, filterNames[i]);
if(filter==null)
{
System.err.println("\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" + filterNames[i] + "'");
System.exit(1);
}
else
{
filterList.add(filter);
String filterClassName = filter.getClass().getName();
String pluginName = null;
//If this filter is a SelfNamedPlugin,
//then the input formats it accepts may differ for
//each "named" plugin that it defines.
//So, we have to look for every key that fits the
//following format: filter.<class-name>.<plugin-name>.inputFormats
if( SelfNamedPlugin.class.isAssignableFrom(filter.getClass()) )
{
//Get the plugin instance name for this class
pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName();
}
//Retrieve our list of supported formats from dspace.cfg
//For SelfNamedPlugins, format of key is:
// filter.<class-name>.<plugin-name>.inputFormats
//For other MediaFilters, format of key is:
// filter.<class-name>.inputFormats
String formats = ConfigurationManager.getProperty(
FILTER_PREFIX + "." + filterClassName +
(pluginName!=null ? "." + pluginName : "") +
"." + INPUT_FORMATS_SUFFIX);
//add to internal map of filters to supported formats
if (formats != null)
{
//For SelfNamedPlugins, map key is:
// <class-name><separator><plugin-name>
//For other MediaFilters, map key is just:
// <class-name>
filterFormats.put(filterClassName +
(pluginName!=null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""),
Arrays.asList(formats.split(",[\\s]*")));
}
}//end if filter!=null
}//end for
//If verbose, print out loaded mediafilter info
if(isVerbose)
{
System.out.println("The following MediaFilters are enabled: ");
Iterator<String> i = filterFormats.keySet().iterator();
while(i.hasNext())
{
String filterName = i.next();
System.out.println("Full Filter Name: " + filterName);
String pluginName = null;
if(filterName.contains(FILTER_PLUGIN_SEPARATOR))
{
String[] fields = filterName.split(FILTER_PLUGIN_SEPARATOR);
filterName=fields[0];
pluginName=fields[1];
}
System.out.println(filterName +
(pluginName!=null? " (Plugin: " + pluginName + ")": ""));
}
}
//store our filter list into an internal array
filterClasses = (FormatFilter[]) filterList.toArray(new FormatFilter[filterList.size()]);
//Retrieve list of identifiers to skip (if any)
String skipIds[] = null;
if(line.hasOption('s'))
{
//specified which identifiers to skip when processing
skipIds = line.getOptionValues('s');
if(skipIds==null || skipIds.length==0)
{ //display error, since no identifiers specified to skip
System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" +
"Make sure to separate multiple identifiers with a comma!\n" +
"(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
}
//save to a global skip list
skipList = Arrays.asList(skipIds);
}
Context c = null;
try
{
c = new Context();
// have to be super-user to do the filtering
c.turnOffAuthorisationSystem();
// now apply the filters
if (identifier == null)
{
applyFiltersAllItems(c);
}
else // restrict application scope to identifier
{
DSpaceObject dso = HandleManager.resolveToObject(c, identifier);
if (dso == null)
{
throw new IllegalArgumentException("Cannot resolve "
+ identifier + " to a DSpace object");
}
switch (dso.getType())
{
case Constants.COMMUNITY:
applyFiltersCommunity(c, (Community)dso);
break;
case Constants.COLLECTION:
applyFiltersCollection(c, (Collection)dso);
break;
case Constants.ITEM:
applyFiltersItem(c, (Item)dso);
break;
}
}
c.complete();
c = null;
}
catch (Exception e)
{
status = 1;
}
finally
{
if (c != null)
{
c.abort();
}
}
System.exit(status);
}
public static void applyFiltersAllItems(Context c) throws Exception
{
if(skipList!=null)
{
//if a skip-list exists, we need to filter community-by-community
//so we can respect what is in the skip-list
Community[] topLevelCommunities = Community.findAllTop(c);
for(int i=0; i<topLevelCommunities.length; i++)
{
applyFiltersCommunity(c, topLevelCommunities[i]);
}
}
else
{
//otherwise, just find every item and process
ItemIterator i = Item.findAll(c);
try
{
while (i.hasNext() && processed < max2Process)
{
applyFiltersItem(c, i.next());
}
}
finally
{
if (i != null)
{
i.close();
}
}
}
}
public static void applyFiltersCommunity(Context c, Community community)
throws Exception
{ //only apply filters if community not in skip-list
if(!inSkipList(community.getHandle()))
{
Community[] subcommunities = community.getSubcommunities();
for (int i = 0; i < subcommunities.length; i++)
{
applyFiltersCommunity(c, subcommunities[i]);
}
Collection[] collections = community.getCollections();
for (int j = 0; j < collections.length; j++)
{
applyFiltersCollection(c, collections[j]);
}
}
}
public static void applyFiltersCollection(Context c, Collection collection)
throws Exception
{
//only apply filters if collection not in skip-list
if(!inSkipList(collection.getHandle()))
{
ItemIterator i = collection.getItems();
try
{
while (i.hasNext() && processed < max2Process)
{
applyFiltersItem(c, i.next());
}
}
finally
{
if (i != null)
{
i.close();
}
}
}
}
public static void applyFiltersItem(Context c, Item item) throws Exception
{
//only apply filters if item not in skip-list
if(!inSkipList(item.getHandle()))
{
//cache this item in MediaFilterManager
//so it can be accessed by MediaFilters as necessary
currentItem = item;
if (filterItem(c, item))
{
// commit changes after each filtered item
c.commit();
// increment processed count
++processed;
}
// clear item objects from context cache and internal cache
item.decache();
currentItem = null;
}
}
/**
* Iterate through the item's bitstreams in the ORIGINAL bundle, applying
* filters if possible.
*
* @return true if any bitstreams processed,
* false if none
*/
public static boolean filterItem(Context c, Item myItem) throws Exception
{
// get 'original' bundles
Bundle[] myBundles = myItem.getBundles("ORIGINAL");
boolean done = false;
for (int i = 0; i < myBundles.length; i++)
{
// now look at all of the bitstreams
Bitstream[] myBitstreams = myBundles[i].getBitstreams();
for (int k = 0; k < myBitstreams.length; k++)
{
done |= filterBitstream(c, myItem, myBitstreams[k]);
}
}
return done;
}
/**
* Attempt to filter a bitstream.
*
* An exception will be thrown if the media filter class cannot be
* instantiated. Exceptions from filtering will be logged to STDOUT and
* swallowed.
*
* @return true if bitstream processed,
* false if no applicable filter or already processed
*/
public static boolean filterBitstream(Context c, Item myItem,
Bitstream myBitstream) throws Exception
{
boolean filtered = false;
// iterate through filter classes. A single format may be actioned
// by more than one filter
for (int i = 0; i < filterClasses.length; i++)
{
//List fmts = (List)filterFormats.get(filterClasses[i].getClass().getName());
String pluginName = null;
//if this filter class is a SelfNamedPlugin,
//its list of supported formats is different for
//differently named "plugin"
if( SelfNamedPlugin.class.isAssignableFrom(filterClasses[i].getClass()) )
{
//get plugin instance name for this media filter
pluginName = ((SelfNamedPlugin)filterClasses[i]).getPluginInstanceName();
}
//Get list of supported formats for the filter (and possibly named plugin)
//For SelfNamedPlugins, map key is:
// <class-name><separator><plugin-name>
//For other MediaFilters, map key is just:
// <class-name>
List<String> fmts = filterFormats.get(filterClasses[i].getClass().getName() +
(pluginName!=null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""));
if (fmts.contains(myBitstream.getFormat().getShortDescription()))
{
try
{
// only update item if bitstream not skipped
if (processBitstream(c, myItem, myBitstream, filterClasses[i]))
{
myItem.update(); // Make sure new bitstream has a sequence
// number
filtered = true;
}
}
catch (Exception e)
{
String handle = myItem.getHandle();
Bundle[] bundles = myBitstream.getBundles();
long size = myBitstream.getSize();
String checksum = myBitstream.getChecksum() + " ("+myBitstream.getChecksumAlgorithm()+")";
int assetstore = myBitstream.getStoreNumber();
// Printout helpful information to find the errored bitstream.
System.out.println("ERROR filtering, skipping bitstream:\n");
System.out.println("\tItem Handle: "+ handle);
for (Bundle bundle : bundles)
{
System.out.println("\tBundle Name: " + bundle.getName());
}
System.out.println("\tFile Size: " + size);
System.out.println("\tChecksum: " + checksum);
System.out.println("\tAsset Store: " + assetstore);
System.out.println(e);
e.printStackTrace();
}
}
else if (filterClasses[i] instanceof SelfRegisterInputFormats)
{
// Filter implements self registration, so check to see if it should be applied
// given the formats it claims to support
SelfRegisterInputFormats srif = (SelfRegisterInputFormats)filterClasses[i];
boolean applyFilter = false;
// Check MIME type
String[] mimeTypes = srif.getInputMIMETypes();
if (mimeTypes != null)
{
for (String mimeType : mimeTypes)
{
if (mimeType.equalsIgnoreCase(myBitstream.getFormat().getMIMEType()))
{
applyFilter = true;
}
}
}
// Check description
if (!applyFilter)
{
String[] descriptions = srif.getInputDescriptions();
if (descriptions != null)
{
for (String desc : descriptions)
{
if (desc.equalsIgnoreCase(myBitstream.getFormat().getShortDescription()))
{
applyFilter = true;
}
}
}
}
// Check extensions
if (!applyFilter)
{
String[] extensions = srif.getInputExtensions();
if (extensions != null)
{
for (String ext : extensions)
{
String[] formatExtensions = myBitstream.getFormat().getExtensions();
if (formatExtensions != null && ArrayUtils.contains(formatExtensions, ext))
{
applyFilter = true;
}
}
}
}
// Filter claims to handle this type of file, so attempt to apply it
if (applyFilter)
{
try
{
// only update item if bitstream not skipped
if (processBitstream(c, myItem, myBitstream, filterClasses[i]))
{
myItem.update(); // Make sure new bitstream has a sequence
// number
filtered = true;
}
}
catch (Exception e)
{
System.out.println("ERROR filtering, skipping bitstream #"
+ myBitstream.getID() + " " + e);
e.printStackTrace();
}
}
}
}
return filtered;
}
    /**
     * A utility class that calls the virtual methods
     * from the current MediaFilter class.
     * It scans the bitstreams in an item, and decides if a bitstream has
     * already been filtered, and if not or if overWrite is set, invokes the
     * filter.
     *
     * @param c
     *            context
     * @param item
     *            item containing bitstream to process
     * @param source
     *            source bitstream to process
     * @param formatFilter
     *            FormatFilter to perform filtering
     *
     * @return true if new rendition is created, false if rendition already
     *         exists and overWrite is not set
     */
    public static boolean processBitstream(Context c, Item item, Bitstream source, FormatFilter formatFilter)
        throws Exception
    {
        //do pre-processing of this bitstream, and if it fails, skip this bitstream!
        if(!formatFilter.preProcessBitstream(c, item, source))
        {
            return false;
        }

        // -f (force) means existing renditions get re-created
        boolean overWrite = MediaFilterManager.isForce;

        // get bitstream filename, calculate destination filename
        String newName = formatFilter.getFilteredName(source.getName());

        Bitstream existingBitstream = null; // is there an existing rendition?
        Bundle targetBundle = null; // bundle we're modifying

        Bundle[] bundles = item.getBundles(formatFilter.getBundleName());

        // check if destination bitstream exists
        if (bundles.length > 0)
        {
            // only finds the last match (FIXME?)
            for (int i = 0; i < bundles.length; i++)
            {
                Bitstream[] bitstreams = bundles[i].getBitstreams();

                for (int j = 0; j < bitstreams.length; j++)
                {
                    if (bitstreams[j].getName().equals(newName))
                    {
                        targetBundle = bundles[i];
                        existingBitstream = bitstreams[j];
                    }
                }
            }
        }

        // if exists and overwrite = false, exit
        if (!overWrite && (existingBitstream != null))
        {
            if (!isQuiet)
            {
                System.out.println("SKIPPED: bitstream " + source.getID()
                        + " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
            }

            return false;
        }

        if(isVerbose) {
            System.out.println("PROCESSING: bitstream " + source.getID()
                    + " (item: " + item.getHandle() + ")");
        }

        InputStream destStream;
        try {
            System.out.println("File: " + newName);
            // Run the actual transformation; a null stream means the filter
            // declined or failed softly, so the bitstream is skipped.
            destStream = formatFilter.getDestinationStream(source.retrieve());
            if (destStream == null)
            {
                if (!isQuiet)
                {
                    System.out.println("SKIPPED: bitstream " + source.getID()
                            + " (item: " + item.getHandle() + ") because filtering was unsuccessful");
                }

                return false;
            }
        }
        catch (OutOfMemoryError oome)
        {
            // Treat an out-of-memory blow-up on one file as a soft skip so
            // the rest of the run can continue.
            System.out.println("!!! OutOfMemoryError !!!");
            return false;
        }

        // create new bundle if needed
        if (bundles.length < 1)
        {
            targetBundle = item.createBundle(formatFilter.getBundleName());
        }
        else
        {
            // take the first match
            // NOTE(review): this unconditionally resets targetBundle to
            // bundles[0], discarding any match found in a later bundle, so
            // the removeBitstream() call below could operate on the wrong
            // bundle when overwriting — confirm whether this is intended.
            targetBundle = bundles[0];
        }

        Bitstream b = targetBundle.createBitstream(destStream);

        // Now set the format and name of the bitstream
        b.setName(newName);
        b.setSource("Written by FormatFilter " + formatFilter.getClass().getName() +
                " on " + DCDate.getCurrent() + " (GMT).");
        b.setDescription(formatFilter.getDescription());

        // Find the proper format
        BitstreamFormat bf = BitstreamFormat.findByShortDescription(c,
                formatFilter.getFormatString());
        b.setFormat(bf);
        b.update();

        //Set permissions on the derivative bitstream
        //- First remove any existing policies
        AuthorizeManager.removeAllPolicies(c, b);

        //- Determine if this is a public-derivative format
        if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
            //- Set derivative bitstream to be publicly accessible
            // (group id 0 — presumably the Anonymous group, matching the
            // "publicly accessible" intent; verify against Group constants)
            Group anonymous = Group.find(c, 0);
            AuthorizeManager.addPolicy(c, b, Constants.READ, anonymous);
        } else {
            //- Inherit policies from the source bitstream
            AuthorizeManager.inheritPolicies(c, source, b);
        }

        // fixme - set date?
        // we are overwriting, so remove old bitstream
        if (existingBitstream != null)
        {
            targetBundle.removeBitstream(existingBitstream);
        }

        if (!isQuiet)
        {
            System.out.println("FILTERED: bitstream " + source.getID()
                    + " (item: " + item.getHandle() + ") and created '" + newName + "'");
        }

        //do post-processing of the generated bitstream
        formatFilter.postProcessBitstream(c, item, b);

        return true;
    }
    /**
     * Return the item that is currently being processed/filtered
     * by the MediaFilterManager.
     * <p>
     * This allows FormatFilters to retrieve the Item object
     * in case they need access to item-level information for their format
     * transformations/conversions.
     *
     * @return current Item being processed by MediaFilterManager, or
     *         {@code null} when no item is mid-processing (the field is only
     *         set for the duration of applyFiltersItem)
     */
    public static Item getCurrentItem()
    {
        return currentItem;
    }
/**
* Check whether or not to skip processing the given identifier.
*
* @param identifier
* identifier (handle) of a community, collection or item
*
* @return true if this community, collection or item should be skipped
* during processing. Otherwise, return false.
*/
public static boolean inSkipList(String identifier)
{
if(skipList!=null && skipList.contains(identifier))
{
if (!isQuiet)
{
System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier);
}
return true;
}
else
{
return false;
}
}
}

View File

@@ -0,0 +1,481 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.*;
import org.apache.log4j.Logger;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.*;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
* MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all
* bitstreams to be processed, even if they have been before; -n noindex does not
* recreate index after processing bitstreams; -i [identifier] limits processing
* scope to a community, collection or item; and -m [max] limits processing to a
* maximum number of items.
*/
public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean
{
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
protected BitstreamFormatService bitstreamFormatService;
@Autowired(required = true)
protected BitstreamService bitstreamService;
@Autowired(required = true)
protected BundleService bundleService;
@Autowired(required = true)
protected CollectionService collectionService;
@Autowired(required = true)
protected CommunityService communityService;
@Autowired(required = true)
protected GroupService groupService;
@Autowired(required = true)
protected ItemService itemService;
protected int max2Process = Integer.MAX_VALUE; // maximum number items to process
protected int processed = 0; // number items processed
protected Item currentItem = null; // current item being processed
protected List<FormatFilter> filterClasses = null;
protected Map<String, List<String>> filterFormats = new HashMap<>();
protected List<String> skipList = null; //list of identifiers to skip during processing
protected final List<String> publicFiltersClasses = new ArrayList<>();
protected boolean isVerbose = false;
protected boolean isQuiet = false;
protected boolean isForce = false; // default to not forced
@Override
public void afterPropertiesSet() throws Exception {
String publicPermissionFilters = ConfigurationManager.getProperty("filter.org.dspace.app.mediafilter.publicPermission");
if(publicPermissionFilters != null) {
String[] publicPermisionFiltersArray = publicPermissionFilters.split(",");
for(String filter : publicPermisionFiltersArray) {
publicFiltersClasses.add(filter.trim());
}
}
}
@Override
public void applyFiltersAllItems(Context context) throws Exception
{
if(skipList!=null)
{
//if a skip-list exists, we need to filter community-by-community
//so we can respect what is in the skip-list
List<Community> topLevelCommunities = communityService.findAllTop(context);
for (Community topLevelCommunity : topLevelCommunities) {
applyFiltersCommunity(context, topLevelCommunity);
}
}
else
{
//otherwise, just find every item and process
Iterator<Item> itemIterator = itemService.findAll(context);
while (itemIterator.hasNext() && processed < max2Process)
{
applyFiltersItem(context, itemIterator.next());
}
}
}
@Override
public void applyFiltersCommunity(Context context, Community community)
throws Exception
{ //only apply filters if community not in skip-list
if(!inSkipList(community.getHandle()))
{
List<Community> subcommunities = community.getSubcommunities();
for (Community subcommunity : subcommunities) {
applyFiltersCommunity(context, subcommunity);
}
List<Collection> collections = community.getCollections();
for (Collection collection : collections) {
applyFiltersCollection(context, collection);
}
}
}
@Override
public void applyFiltersCollection(Context context, Collection collection)
throws Exception
{
//only apply filters if collection not in skip-list
if(!inSkipList(collection.getHandle()))
{
Iterator<Item> itemIterator = itemService.findAllByCollection(context, collection);
while (itemIterator.hasNext() && processed < max2Process)
{
applyFiltersItem(context, itemIterator.next());
}
}
}
@Override
public void applyFiltersItem(Context c, Item item) throws Exception
{
//only apply filters if item not in skip-list
if(!inSkipList(item.getHandle()))
{
//cache this item in MediaFilterManager
//so it can be accessed by MediaFilters as necessary
currentItem = item;
if (filterItem(c, item))
{
// increment processed count
++processed;
}
// clear item objects from context cache and internal cache
currentItem = null;
}
}
@Override
public boolean filterItem(Context context, Item myItem) throws Exception
{
// get 'original' bundles
List<Bundle> myBundles = itemService.getBundles(myItem, "ORIGINAL");
boolean done = false;
for (Bundle myBundle : myBundles) {
// now look at all of the bitstreams
List<Bitstream> myBitstreams = myBundle.getBitstreams();
for (Bitstream myBitstream : myBitstreams) {
done |= filterBitstream(context, myItem, myBitstream);
}
}
return done;
}
@Override
public boolean filterBitstream(Context context, Item myItem,
Bitstream myBitstream) throws Exception
{
boolean filtered = false;
// iterate through filter classes. A single format may be actioned
// by more than one filter
for (FormatFilter filterClass : filterClasses) {
//List fmts = (List)filterFormats.get(filterClasses[i].getClass().getName());
String pluginName = null;
//if this filter class is a SelfNamedPlugin,
//its list of supported formats is different for
//differently named "plugin"
if (SelfNamedPlugin.class.isAssignableFrom(filterClass.getClass())) {
//get plugin instance name for this media filter
pluginName = ((SelfNamedPlugin) filterClass).getPluginInstanceName();
}
//Get list of supported formats for the filter (and possibly named plugin)
//For SelfNamedPlugins, map key is:
// <class-name><separator><plugin-name>
//For other MediaFilters, map key is just:
// <class-name>
List<String> fmts = filterFormats.get(filterClass.getClass().getName() +
(pluginName != null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""));
if (fmts.contains(myBitstream.getFormat(context).getShortDescription())) {
try {
// only update item if bitstream not skipped
if (processBitstream(context, myItem, myBitstream, filterClass)) {
itemService.update(context, myItem); // Make sure new bitstream has a sequence
// number
filtered = true;
}
} catch (Exception e) {
String handle = myItem.getHandle();
List<Bundle> bundles = myBitstream.getBundles();
long size = myBitstream.getSize();
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
int assetstore = myBitstream.getStoreNumber();
// Printout helpful information to find the errored bitstream.
System.out.println("ERROR filtering, skipping bitstream:\n");
System.out.println("\tItem Handle: " + handle);
for (Bundle bundle : bundles) {
System.out.println("\tBundle Name: " + bundle.getName());
}
System.out.println("\tFile Size: " + size);
System.out.println("\tChecksum: " + checksum);
System.out.println("\tAsset Store: " + assetstore);
System.out.println(e);
e.printStackTrace();
}
} else if (filterClass instanceof SelfRegisterInputFormats) {
// Filter implements self registration, so check to see if it should be applied
// given the formats it claims to support
SelfRegisterInputFormats srif = (SelfRegisterInputFormats) filterClass;
boolean applyFilter = false;
// Check MIME type
String[] mimeTypes = srif.getInputMIMETypes();
if (mimeTypes != null) {
for (String mimeType : mimeTypes) {
if (mimeType.equalsIgnoreCase(myBitstream.getFormat(context).getMIMEType())) {
applyFilter = true;
}
}
}
// Check description
if (!applyFilter) {
String[] descriptions = srif.getInputDescriptions();
if (descriptions != null) {
for (String desc : descriptions) {
if (desc.equalsIgnoreCase(myBitstream.getFormat(context).getShortDescription())) {
applyFilter = true;
}
}
}
}
// Check extensions
if (!applyFilter) {
String[] extensions = srif.getInputExtensions();
if (extensions != null) {
for (String ext : extensions) {
List<String> formatExtensions = myBitstream.getFormat(context).getExtensions();
if (formatExtensions != null && formatExtensions.contains(ext)) {
applyFilter = true;
}
}
}
}
// Filter claims to handle this type of file, so attempt to apply it
if (applyFilter) {
try {
// only update item if bitstream not skipped
if (processBitstream(context, myItem, myBitstream, filterClass)) {
itemService.update(context, myItem); // Make sure new bitstream has a sequence
// number
filtered = true;
}
} catch (Exception e) {
System.out.println("ERROR filtering, skipping bitstream #"
+ myBitstream.getID() + " " + e);
e.printStackTrace();
}
}
}
}
return filtered;
}
@Override
public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
    throws Exception
{
    // Give the filter a chance to veto this bitstream (e.g. an unsupported
    // variant of its format). If pre-processing fails, skip it entirely.
    if (!formatFilter.preProcessBitstream(context, item, source, isVerbose))
    {
        return false;
    }

    boolean overWrite = isForce;

    // Destination filename is derived from the source name by the filter
    // (e.g. "paper.pdf" -> "paper.pdf.txt").
    String newName = formatFilter.getFilteredName(source.getName());

    Bitstream existingBitstream = null; // existing rendition, if any
    Bundle targetBundle = null;         // bundle we're modifying

    List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());

    // Check whether a bitstream with the destination name already exists.
    // NOTE: keeps the LAST match if several bundles/bitstreams share the name (FIXME?)
    if (bundles.size() > 0)
    {
        for (Bundle bundle : bundles) {
            List<Bitstream> bitstreams = bundle.getBitstreams();
            for (Bitstream bitstream : bitstreams) {
                if (bitstream.getName().equals(newName)) {
                    targetBundle = bundle;
                    existingBitstream = bitstream;
                }
            }
        }
    }

    // A rendition already exists and we are not forcing: nothing to do.
    if (!overWrite && (existingBitstream != null))
    {
        if (!isQuiet)
        {
            System.out.println("SKIPPED: bitstream " + source.getID()
                    + " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
        }
        return false;
    }

    if (isVerbose) {
        System.out.println("PROCESSING: bitstream " + source.getID()
                + " (item: " + item.getHandle() + ")");
        // FIX: this line used to print unconditionally (leftover debug output);
        // it now respects the verbose flag like every other progress message.
        System.out.println("File: " + newName);
    }

    InputStream destStream;
    try {
        destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
        if (destStream == null) {
            if (!isQuiet) {
                System.out.println("SKIPPED: bitstream " + source.getID()
                        + " (item: " + item.getHandle() + ") because filtering was unsuccessful");
            }
            return false;
        }
    } catch (OutOfMemoryError oome) {
        // Don't let one oversized file abort the whole run; report and move on.
        System.out.println("!!! OutOfMemoryError !!!");
        return false;
    }

    // Create the target bundle if none exists yet; otherwise reuse the first match.
    if (bundles.size() < 1)
    {
        targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
    }
    else
    {
        targetBundle = bundles.get(0);
    }

    Bitstream b = bitstreamService.create(context, targetBundle, destStream);

    // Record name, provenance and description of the derivative bitstream.
    b.setName(context, newName);
    b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
            " on " + DCDate.getCurrent() + " (GMT).");
    b.setDescription(context, formatFilter.getDescription());

    // Resolve the bitstream format declared by the filter and apply it.
    BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
            formatFilter.getFormatString());
    bitstreamService.setFormat(context, b, bf);
    bitstreamService.update(context, b);

    // Set permissions on the derivative bitstream:
    // - first remove any existing policies
    authorizeService.removeAllPolicies(context, b);
    // - then either make it public (for configured "public" filters) ...
    if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
        Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
        authorizeService.addPolicy(context, b, Constants.READ, anonymous);
    } else {
        // ... or inherit the source bitstream's policies.
        authorizeService.inheritPolicies(context, source, b);
    }

    // fixme - set date?

    // We are overwriting, so remove the old rendition now that the new one exists.
    if (existingBitstream != null)
    {
        bundleService.removeBitstream(context, targetBundle, existingBitstream);
    }

    if (!isQuiet)
    {
        System.out.println("FILTERED: bitstream " + source.getID()
                + " (item: " + item.getHandle() + ") and created '" + newName + "'");
    }

    // Allow the filter to do any post-processing on the generated bitstream.
    formatFilter.postProcessBitstream(context, item, b);

    return true;
}
@Override
public Item getCurrentItem() {
    // Item currently being filtered, exposed so FormatFilters can reach
    // item-level information during a conversion.
    return currentItem;
}
@Override
public boolean inSkipList(String identifier) {
    // Guard clause: no skip list configured, or identifier not listed -> process it.
    if (skipList == null || !skipList.contains(identifier)) {
        return false;
    }
    // Identifier is on the skip list; announce unless running quietly.
    if (!isQuiet) {
        System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier);
    }
    return true;
}
/** Enable printing of extracted text and per-file progress detail to STDOUT. */
@Override
public void setVerbose(boolean isVerbose) {
this.isVerbose = isVerbose;
}
/** Suppress routine per-bitstream status messages (SKIPPED/FILTERED). */
@Override
public void setQuiet(boolean isQuiet) {
this.isQuiet = isQuiet;
}
/** Force re-filtering even when a derivative with the target name already exists. */
@Override
public void setForce(boolean isForce) {
this.isForce = isForce;
}
/** Upper bound on the number of items to process in this run. */
@Override
public void setMax2Process(int max2Process) {
this.max2Process = max2Process;
}
/** The ordered list of FormatFilter instances applied to each bitstream. */
@Override
public void setFilterClasses(List<FormatFilter> filterClasses) {
this.filterClasses = filterClasses;
}
/** Identifiers (handles) of communities/collections/items to skip entirely. */
@Override
public void setSkipList(List<String> skipList) {
this.skipList = skipList;
}
// NOTE(review): presumably maps a filter class name (plus optional plugin name,
// see FILTER_PLUGIN_SEPARATOR) to the formats it accepts — confirm against config loader.
@Override
public void setFilterFormats(Map<String, List<String>> filterFormats) {
this.filterFormats = filterFormats;
}
}

View File

@@ -19,6 +19,7 @@ import java.io.Writer;
import org.apache.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.util.PDFTextStripper;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
/*
@@ -32,6 +33,7 @@ public class PDFFilter extends MediaFilter
private static Logger log = Logger.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -41,6 +43,7 @@ public class PDFFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -49,6 +52,7 @@ public class PDFFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "Text";
@@ -57,6 +61,7 @@ public class PDFFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -68,7 +73,8 @@ public class PDFFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
try

View File

@@ -16,6 +16,7 @@ import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.POITextExtractor;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
/*
* TODO: Allow user to configure extraction of only text or only notes
@@ -26,6 +27,7 @@ public class PowerPointFilter extends MediaFilter
private static Logger log = Logger.getLogger(PowerPointFilter.class);
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -35,6 +37,7 @@ public class PowerPointFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -45,6 +48,7 @@ public class PowerPointFilter extends MediaFilter
*
* TODO: Check that this is correct
*/
@Override
public String getFormatString()
{
return "Text";
@@ -53,6 +57,7 @@ public class PowerPointFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -64,7 +69,8 @@ public class PowerPointFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
@@ -101,7 +107,7 @@ public class PowerPointFilter extends MediaFilter
{
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println(extractedText);
}

View File

@@ -13,6 +13,7 @@ import java.io.IOException;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.textmining.extraction.TextExtractor;
import org.textmining.extraction.word.WordTextExtractorFactory;
@@ -27,6 +28,7 @@ public class WordFilter extends MediaFilter
private static Logger log = Logger.getLogger(WordFilter.class);
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -36,6 +38,7 @@ public class WordFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -44,6 +47,7 @@ public class WordFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "Text";
@@ -52,6 +56,7 @@ public class WordFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -63,7 +68,8 @@ public class WordFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// get input stream from bitstream
@@ -76,7 +82,7 @@ public class WordFilter extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println(extractedText);
}

View File

@@ -18,6 +18,7 @@ import java.io.OutputStream;
import java.util.Arrays;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Utils;
@@ -48,7 +49,7 @@ public class XPDF2Text extends MediaFilter
private static Logger log = Logger.getLogger(XPDF2Text.class);
// Command to get text from pdf; @infile@, @COMMAND@ are placeholders
private static final String XPDF_PDFTOTEXT_COMMAND[] =
protected static final String XPDF_PDFTOTEXT_COMMAND[] =
{
"@COMMAND@", "-q", "-enc", "UTF-8", "@infile@", "-"
};
@@ -57,27 +58,32 @@ public class XPDF2Text extends MediaFilter
// executable path that comes from DSpace config at runtime.
private String pdftotextPath = null;
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
@Override
public String getBundleName()
{
return "TEXT";
}
@Override
public String getFormatString()
{
return "Text";
}
@Override
public String getDescription()
{
return "Extracted Text";
}
public InputStream getDestinationStream(InputStream sourceStream)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream sourceStream, boolean verbose)
throws Exception
{
// get configured value for path to XPDF command:

View File

@@ -29,6 +29,7 @@ import java.util.regex.Pattern;
import javax.imageio.ImageIO;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Utils;
@@ -62,62 +63,67 @@ public class XPDF2Thumbnail extends MediaFilter
private static Logger log = Logger.getLogger(XPDF2Thumbnail.class);
// maximum size of either preview image dimension
private static final int MAX_PX = 800;
protected static final int MAX_PX = 800;
// maxium DPI - use common screen res, 100dpi.
private static final int MAX_DPI = 100;
protected static final int MAX_DPI = 100;
// command to get image from PDF; @FILE@, @OUTPUT@ are placeholders
private static final String XPDF_PDFTOPPM_COMMAND[] =
protected static final String XPDF_PDFTOPPM_COMMAND[] =
{
"@COMMAND@", "-q", "-f", "1", "-l", "1",
"-r", "@DPI@", "@FILE@", "@OUTPUTFILE@"
};
// command to get image from PDF; @FILE@, @OUTPUT@ are placeholders
private static final String XPDF_PDFINFO_COMMAND[] =
protected static final String XPDF_PDFINFO_COMMAND[] =
{
"@COMMAND@", "-f", "1", "-l", "1", "-box", "@FILE@"
};
// executable path for "pdftoppm", comes from DSpace config at runtime.
private String pdftoppmPath = null;
protected String pdftoppmPath = null;
// executable path for "pdfinfo", comes from DSpace config at runtime.
private String pdfinfoPath = null;
protected String pdfinfoPath = null;
// match line in pdfinfo output that describes file's MediaBox
private static final Pattern MEDIABOX_PATT = Pattern.compile(
protected static final Pattern MEDIABOX_PATT = Pattern.compile(
"^Page\\s+\\d+\\s+MediaBox:\\s+([\\.\\d-]+)\\s+([\\.\\d-]+)\\s+([\\.\\d-]+)\\s+([\\.\\d-]+)");
// also from thumbnail.maxwidth in config
private int xmax = 0;
protected int xmax = 0;
// backup default for size, on the large side.
private static final int DEFAULT_XMAX = 500;
protected static final int DEFAULT_XMAX = 500;
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
}
@Override
public String getBundleName()
{
return "THUMBNAIL";
}
@Override
public String getFormatString()
{
return "JPEG";
}
@Override
public String getDescription()
{
return "Generated Thumbnail";
}
// canonical MediaFilter method to generate the thumbnail as stream.
public InputStream getDestinationStream(InputStream sourceStream)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream sourceStream, boolean verbose)
throws Exception
{
// get config params
@@ -301,7 +307,7 @@ public class XPDF2Thumbnail extends MediaFilter
// if verbose flag is set, print out dimensions
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("original size: " + xsize + "," + ysize);
}
@@ -314,7 +320,7 @@ public class XPDF2Thumbnail extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("x scale factor: " + scale_factor);
}
@@ -326,7 +332,7 @@ public class XPDF2Thumbnail extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("new size: " + xsize + "," + ysize);
}
@@ -344,7 +350,7 @@ public class XPDF2Thumbnail extends MediaFilter
}
// if verbose flag is set, print details to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("created thumbnail size: " + xsize + ", "
+ ysize);

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter.factory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.utils.DSpace;
/**
* Abstract factory to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public abstract class MediaFilterServiceFactory {
/** @return the configured {@link MediaFilterService} implementation */
public abstract MediaFilterService getMediaFilterService();
/**
 * Resolve the concrete factory from the DSpace service manager by its
 * Spring bean name ("mediaFilterServiceFactory").
 */
public static MediaFilterServiceFactory getInstance(){
return new DSpace().getServiceManager().getServiceByName("mediaFilterServiceFactory", MediaFilterServiceFactory.class);
}
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter.factory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public class MediaFilterServiceFactoryImpl extends MediaFilterServiceFactory {
// Injected by Spring; required, so startup fails fast if the bean is missing.
@Autowired(required = true)
private MediaFilterService mediaFilterService;
/** @return the Spring-injected {@link MediaFilterService} singleton */
@Override
public MediaFilterService getMediaFilterService() {
return mediaFilterService;
}
}

View File

@@ -0,0 +1,126 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter.service;
import org.dspace.app.mediafilter.FormatFilter;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import java.util.List;
import java.util.Map;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
* MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all
* bitstreams to be processed, even if they have been before; -n noindex does not
* recreate index after processing bitstreams; -i [identifier] limits processing
* scope to a community, collection or item; and -m [max] limits processing to a
* maximum number of items.
*/
public interface MediaFilterService {
//separator in filterFormats Map between a filter class name and a plugin name,
//for MediaFilters which extend SelfNamedPlugin (\034 is "file separator" char)
public static final String FILTER_PLUGIN_SEPARATOR = "\034";
/** Apply the configured filters to every item in the repository. */
public void applyFiltersAllItems(Context context) throws Exception;
/** Apply the configured filters to all items within the given community. */
public void applyFiltersCommunity(Context context, Community community)
throws Exception;
/** Apply the configured filters to all items within the given collection. */
public void applyFiltersCollection(Context context, Collection collection)
throws Exception;
/** Apply the configured filters to a single item. */
public void applyFiltersItem(Context c, Item item) throws Exception;
/**
 * Iterate through the item's bitstreams in the ORIGINAL bundle, applying
 * filters if possible.
 *
 * @return true if any bitstreams processed,
 * false if none
 */
public boolean filterItem(Context context, Item myItem) throws Exception;
/**
 * Attempt to filter a bitstream.
 *
 * An exception will be thrown if the media filter class cannot be
 * instantiated. Exceptions from filtering will be logged to STDOUT and
 * swallowed.
 *
 * @return true if bitstream processed,
 * false if no applicable filter or already processed
 */
public boolean filterBitstream(Context c, Item myItem, Bitstream myBitstream) throws Exception;
/**
 * A utility method that calls the virtual methods
 * from the current MediaFilter class.
 * It scans the bitstreams in an item, and decides if a bitstream has
 * already been filtered, and if not or if overWrite is set, invokes the
 * filter.
 *
 * @param c
 * context
 * @param item
 * item containing bitstream to process
 * @param source
 * source bitstream to process
 * @param formatFilter
 * FormatFilter to perform filtering
 *
 * @return true if new rendition is created, false if rendition already
 * exists and overWrite is not set
 */
public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
throws Exception;
/**
 * Return the item that is currently being processed/filtered
 * by this service.
 * <p>
 * This allows FormatFilters to retrieve the Item object
 * in case they need access to item-level information for their format
 * transformations/conversions.
 *
 * @return current Item being processed by this service
 */
public Item getCurrentItem();
/**
 * Check whether or not to skip processing the given identifier.
 *
 * @param identifier
 * identifier (handle) of a community, collection or item
 *
 * @return true if this community, collection or item should be skipped
 * during processing. Otherwise, return false.
 */
public boolean inSkipList(String identifier);
/** Enable printing of extracted text and progress detail to STDOUT. */
public void setVerbose(boolean isVerbose);
/** Suppress routine per-bitstream status messages. */
public void setQuiet(boolean isQuiet);
/** Force processing even if a derivative already exists. */
public void setForce(boolean isForce);
/** Maximum number of items to process in one run. */
public void setMax2Process(int max2Process);
/** The list of FormatFilter instances to apply. */
public void setFilterClasses(List<FormatFilter> filterClasses);
/** Identifiers (handles) of communities/collections/items to skip. */
public void setSkipList(List<String> skipList);
// NOTE(review): presumably maps a filter class name (plus optional plugin name,
// see FILTER_PLUGIN_SEPARATOR) to its supported formats — confirm with implementation.
public void setFilterFormats(Map<String, List<String>> filterFormats);
}

View File

@@ -31,7 +31,9 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.workflow.WorkflowException;
/**
* Command-line interface to the Packager plugin.
@@ -113,12 +115,12 @@ import org.dspace.handle.HandleManager;
public class Packager
{
/* Various private global settings/options */
private String packageType = null;
private boolean submit = true;
private boolean userInteractionEnabled = true;
protected String packageType = null;
protected boolean submit = true;
protected boolean userInteractionEnabled = true;
// die from illegal command line
private static void usageError(String msg)
protected static void usageError(String msg)
{
System.out.println(msg);
System.out.println(" (run with -h flag for details)");
@@ -316,7 +318,7 @@ public class Packager
// find the EPerson, assign to context
Context context = new Context();
EPerson myEPerson = null;
myEPerson = EPerson.findByEmail(context, eperson);
myEPerson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, eperson);
if (myEPerson == null)
{
usageError("Error, eperson cannot be found: " + eperson);
@@ -339,7 +341,7 @@ public class Packager
//if a specific identifier was specified, make sure it is valid
if(identifier!=null && identifier.length()>0)
{
objToReplace = HandleManager.resolveToObject(context, identifier);
objToReplace = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
if (objToReplace == null)
{
throw new IllegalArgumentException("Bad identifier/handle -- "
@@ -407,7 +409,7 @@ public class Packager
for (int i = 0; i < parents.length; i++)
{
// sanity check: did handle resolve?
parentObjs[i] = HandleManager.resolveToObject(context,
parentObjs[i] = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context,
parents[i]);
if (parentObjs[i] == null)
{
@@ -449,7 +451,7 @@ public class Packager
usageError("Error, Unknown package type: " + myPackager.packageType);
}
DSpaceObject dso = HandleManager.resolveToObject(context, identifier);
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
if (dso == null)
{
throw new IllegalArgumentException("Bad identifier/handle -- "
@@ -504,98 +506,103 @@ public class Packager
//NOTE: at this point, Parent may be null -- in which case it is up to the PackageIngester
// to either determine the Parent (from package contents) or throw an error.
//If we are doing a recursive ingest, call ingestAll()
if(pkgParams.recursiveModeEnabled())
try
{
System.out.println("\nAlso ingesting all referenced packages (recursive mode)..");
System.out.println("This may take a while, please check your logs for ongoing status while we process each package.");
//ingest first package & recursively ingest anything else that package references (child packages, etc)
List<DSpaceObject> dsoResults = sip.ingestAll(context, parent, pkgFile, pkgParams, null);
if(dsoResults!=null)
//If we are doing a recursive ingest, call ingestAll()
if(pkgParams.recursiveModeEnabled())
{
//Report total objects created
System.out.println("\nCREATED a total of " + dsoResults.size() + " DSpace Objects.");
System.out.println("\nAlso ingesting all referenced packages (recursive mode)..");
System.out.println("This may take a while, please check your logs for ongoing status while we process each package.");
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if(this.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were created? [y/n]: ");
choiceString = input.readLine();
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
//ingest first package & recursively ingest anything else that package references (child packages, etc)
List<String> hdlResults = sip.ingestAll(context, parent, pkgFile, pkgParams, null);
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y"))
if (hdlResults != null)
{
System.out.println("\n\n");
for(DSpaceObject result : dsoResults)
//Report total objects created
System.out.println("\nCREATED a total of " + hdlResults.size() + " DSpace Objects.");
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if (this.userInteractionEnabled)
{
if(pkgParams.restoreModeEnabled())
{
System.out.println("RESTORED DSpace " + Constants.typeText[result.getType()] +
" [ hdl=" + result.getHandle() + ", dbID=" + result.getID() + " ] ");
}
else
{
System.out.println("CREATED new DSpace " + Constants.typeText[result.getType()] +
" [ hdl=" + result.getHandle() + ", dbID=" + result.getID() + " ] ");
}
}
}
}
}
else
{
//otherwise, just one package to ingest
try
{
DSpaceObject dso = sip.ingest(context, parent, pkgFile, pkgParams, null);
if(dso!=null)
{
if(pkgParams.restoreModeEnabled())
{
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were created? [y/n]: ");
choiceString = input.readLine();
}
else
{
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for (String result : hdlResults)
{
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, result);
if(dso!=null)
{
if (pkgParams.restoreModeEnabled()) {
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
} else {
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
}
}
}
}
}
else
{
//otherwise, just one package to ingest
try
{
DSpaceObject dso = sip.ingest(context, parent, pkgFile, pkgParams, null);
if (dso != null)
{
if (pkgParams.restoreModeEnabled())
{
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
else
{
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
}
}
catch (IllegalStateException ie)
{
// NOTE: if we encounter an IllegalStateException, this means the
// handle is already in use and this object already exists.
//if we are skipping over (i.e. keeping) existing objects
if (pkgParams.keepExistingModeEnabled())
{
System.out.println("\nSKIPPED processing package '" + pkgFile + "', as an Object already exists with this handle.");
}
else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
{
throw ie;
}
}
}
catch(IllegalStateException ie)
{
// NOTE: if we encounter an IllegalStateException, this means the
// handle is already in use and this object already exists.
//if we are skipping over (i.e. keeping) existing objects
if(pkgParams.keepExistingModeEnabled())
{
System.out.println("\nSKIPPED processing package '" + pkgFile + "', as an Object already exists with this handle.");
}
else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
{
throw ie;
}
}
}
catch (WorkflowException e)
{
throw new PackageException(e);
}
}
@@ -719,58 +726,67 @@ public class Packager
// NOTE: At this point, objToReplace may be null. If it is null, it is up to the PackageIngester
// to determine which Object needs to be replaced (based on the handle specified in the pkg, etc.)
//If we are doing a recursive replace, call replaceAll()
if(pkgParams.recursiveModeEnabled())
try
{
//ingest first object using package & recursively replace anything else that package references (child objects, etc)
List<DSpaceObject> dsoResults = sip.replaceAll(context, objToReplace, pkgFile, pkgParams);
if(dsoResults!=null)
//If we are doing a recursive replace, call replaceAll()
if (pkgParams.recursiveModeEnabled())
{
//Report total objects replaced
System.out.println("\nREPLACED a total of " + dsoResults.size() + " DSpace Objects.");
//ingest first object using package & recursively replace anything else that package references (child objects, etc)
List<String> hdlResults = sip.replaceAll(context, objToReplace, pkgFile, pkgParams);
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if(this.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were replaced? [y/n]: ");
choiceString = input.readLine();
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
if (hdlResults != null) {
//Report total objects replaced
System.out.println("\nREPLACED a total of " + hdlResults.size() + " DSpace Objects.");
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for(DSpaceObject result : dsoResults)
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if (this.userInteractionEnabled)
{
System.out.println("REPLACED DSpace " + Constants.typeText[result.getType()] +
" [ hdl=" + result.getHandle() + " ] ");
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were replaced? [y/n]: ");
choiceString = input.readLine();
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for (String result : hdlResults)
{
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, result);
if (dso != null)
{
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
}
}
}
}
}
else
{
//otherwise, just one object to replace
DSpaceObject dso = sip.replace(context, objToReplace, pkgFile, pkgParams);
if (dso != null)
{
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
}
}
}
else
catch (WorkflowException e)
{
//otherwise, just one object to replace
DSpaceObject dso = sip.replace(context, objToReplace, pkgFile, pkgParams);
if(dso!=null)
{
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
}
throw new PackageException(e);
}
}
}

View File

@@ -7,138 +7,133 @@
*/
package org.dspace.app.requestitem;
import org.apache.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import java.sql.SQLException;
import javax.persistence.*;
import java.util.Date;
/**
* Object representing an Item Request
*/
@Entity
@Table(name="requestitem")
public class RequestItem {
private static Logger log = Logger.getLogger(RequestItem.class);
private int bitstreamId, itemID;
@Id
@Column(name="requestitem_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE ,generator="requestitem_seq")
@SequenceGenerator(name="requestitem_seq", sequenceName="requestitem_seq", allocationSize = 1)
private int requestitem_id;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "bitstream_id")
private Bitstream bitstream;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "item_id")
private Item item;
@Column(name = "request_email", length = 64)
private String reqEmail;
@Column(name = "request_name", length = 64)
private String reqName;
// @Column(name = "request_message")
// @Lob
@Column(name="request_message", columnDefinition = "text")
private String reqMessage;
@Column(name = "token", unique = true, length = 48)
private String token;
@Column(name = "allfiles")
private boolean allfiles;
private Date decision_date;
private boolean accept_request;
public RequestItem(int itemID, int bitstreamId, String reqEmail, String reqName, String reqMessage, boolean allfiles){
this.itemID = itemID;
this.bitstreamId = bitstreamId;
this.reqEmail = reqEmail;
this.reqName = reqName;
this.reqMessage = reqMessage;
this.allfiles = allfiles;
}
@Column(name = "decision_date")
@Temporal(TemporalType.TIMESTAMP)
private Date decision_date = null;
private RequestItem(TableRow record) {
this.itemID = record.getIntColumn("item_id");
this.bitstreamId = record.getIntColumn("bitstream_id");
this.token = record.getStringColumn("token");
this.reqEmail = record.getStringColumn("request_email");
this.reqName = record.getStringColumn("request_name");
this.reqMessage = record.getStringColumn("request_message");
this.allfiles = record.getBooleanColumn("allfiles");
this.decision_date = record.getDateColumn("decision_date");
this.accept_request = record.getBooleanColumn("accept_request");
}
@Column(name = "expires")
@Temporal(TemporalType.TIMESTAMP)
private Date expires = null;
public static RequestItem findByToken(Context context, String token) {
try {
TableRow requestItem = DatabaseManager.findByUnique(context, "requestitem", "token", token);
return new RequestItem(requestItem);
} catch (SQLException e) {
log.error(e.getMessage());
return null;
}
}
@Column(name = "request_date")
@Temporal(TemporalType.TIMESTAMP)
private Date request_date = null;
@Column(name = "accept_request")
private Boolean accept_request = null;
/**
* Save updates to the record. Only accept_request, and decision_date are set-able.
* @param context
* Protected constructor, create object using:
* {@link org.dspace.app.requestitem.service.RequestItemService#createRequest(Context, Bitstream, Item, boolean, String, String, String)}
*/
public void update(Context context) {
try {
TableRow record = DatabaseManager.findByUnique(context, "requestitem", "token", token);
record.setColumn("accept_request", accept_request);
record.setColumn("decision_date", decision_date);
DatabaseManager.update(context, record);
} catch (SQLException e) {
log.error(e.getMessage());
}
}
/**
* Generate a unique id of the request and put it into the DB
* @param context
* @return
* @throws java.sql.SQLException
*/
public String getNewToken(Context context) throws SQLException
protected RequestItem()
{
TableRow record = DatabaseManager.create(context, "requestitem");
record.setColumn("token", Utils.generateHexKey());
record.setColumn("bitstream_id", bitstreamId);
record.setColumn("item_id", itemID);
record.setColumn("allfiles", allfiles);
record.setColumn("request_email", reqEmail);
record.setColumn("request_name", reqName);
record.setColumn("request_message", reqMessage);
record.setColumnNull("accept_request");
record.setColumn("request_date", new Date());
record.setColumnNull("decision_date");
record.setColumnNull("expires");
}
DatabaseManager.update(context, record);
if (log.isDebugEnabled())
{
log.debug("Created requestitem_token " + record.getIntColumn("requestitem_id")
+ " with token " + record.getStringColumn("token") + "\"");
}
return record.getStringColumn("token");
public int getID() {
return requestitem_id;
}
void setAllfiles(boolean allfiles) {
this.allfiles = allfiles;
}
public boolean isAllfiles() {
return allfiles;
}
void setReqMessage(String reqMessage) {
this.reqMessage = reqMessage;
}
public String getReqMessage() {
return reqMessage;
}
void setReqName(String reqName) {
this.reqName = reqName;
}
public String getReqName() {
return reqName;
}
void setReqEmail(String reqEmail) {
this.reqEmail = reqEmail;
}
public String getReqEmail() {
return reqEmail;
}
void setToken(String token) {
this.token = token;
}
public String getToken() {
return token;
}
public int getItemID() {
return itemID;
void setItem(Item item) {
this.item = item;
}
public int getBitstreamId() {
return bitstreamId;
public Item getItem() {
return item;
}
void setBitstream(Bitstream bitstream) {
this.bitstream = bitstream;
}
public Bitstream getBitstream() {
return bitstream;
}
public Date getDecision_date() {
@@ -156,4 +151,20 @@ public class RequestItem {
public void setAccept_request(boolean accept_request) {
this.accept_request = accept_request;
}
public Date getExpires() {
return expires;
}
void setExpires(Date expires) {
this.expires = expires;
}
public Date getRequest_date() {
return request_date;
}
void setRequest_date(Date request_date) {
this.request_date = request_date;
}
}

View File

@@ -15,6 +15,8 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
@@ -29,7 +31,10 @@ import java.sql.SQLException;
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
private static Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
private Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
@Autowired(required = true)
protected EPersonService ePersonService;
public RequestItemHelpdeskStrategy() {}
@@ -57,13 +62,9 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException{
EPerson helpdeskEPerson = null;
try {
context.turnOffAuthorisationSystem();
helpdeskEPerson = EPerson.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();
} catch (AuthorizeException e) {
log.error(e.getMessage());
}
context.turnOffAuthorisationSystem();
helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();
if(helpdeskEPerson != null) {
return new RequestItemAuthor(helpdeskEPerson);

View File

@@ -8,12 +8,15 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Try to look to an item metadata for the corresponding author name and email.
@@ -24,8 +27,11 @@ import org.dspace.core.I18nUtil;
*/
public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
private String emailMetadata;
private String fullNameMatadata;
protected String emailMetadata;
protected String fullNameMatadata;
@Autowired(required = true)
protected ItemService itemService;
public RequestItemMetadataStrategy() {
}
@@ -35,17 +41,17 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
throws SQLException {
if (emailMetadata != null)
{
Metadatum[] vals = item.getMetadataByMetadataString(emailMetadata);
if (vals.length > 0)
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0)
{
String email = vals[0].value;
String email = vals.iterator().next().getValue();
String fullname = null;
if (fullNameMatadata != null)
{
Metadatum[] nameVals = item.getMetadataByMetadataString(fullNameMatadata);
if (nameVals.length > 0)
List<MetadataValue> nameVals = itemService.getMetadataByMetadataString(item, fullNameMatadata);
if (nameVals.size() > 0)
{
fullname = nameVals[0].value;
fullname = nameVals.iterator().next().getValue();
}
}

View File

@@ -0,0 +1,77 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import org.apache.log4j.Logger;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
import java.util.Date;
/**
* Service implementation for the RequestItem object.
* This class is responsible for all business logic calls for the RequestItem object and is autowired by spring.
* This class should never be accessed directly.
*
* @author kevinvandevelde at atmire.com
*/
public class RequestItemServiceImpl implements RequestItemService {

    private final Logger log = Logger.getLogger(RequestItemServiceImpl.class);

    @Autowired(required = true)
    protected RequestItemDAO requestItemDAO;

    /**
     * Create and persist a new item request, generating a fresh access token.
     *
     * @param context the current DSpace context
     * @param bitstream the bitstream the requester asked for
     * @param item the item the bitstream belongs to
     * @param allFiles whether the request applies to all of the item's files
     * @param reqEmail e-mail address supplied by the requester
     * @param reqName name supplied by the requester
     * @param reqMessage free-text message supplied by the requester
     * @return the generated token identifying this request
     * @throws SQLException if a database error occurs
     */
    @Override
    public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail, String reqName, String reqMessage) throws SQLException {
        RequestItem requestItem = requestItemDAO.create(context, new RequestItem());

        // The token is the requester's only handle on this record, so it is
        // generated here rather than supplied by the caller.
        requestItem.setToken(Utils.generateHexKey());
        requestItem.setBitstream(bitstream);
        requestItem.setItem(item);
        requestItem.setAllfiles(allFiles);
        requestItem.setReqEmail(reqEmail);
        requestItem.setReqName(reqName);
        requestItem.setReqMessage(reqMessage);
        requestItem.setRequest_date(new Date());

        requestItemDAO.save(context, requestItem);

        if (log.isDebugEnabled())
        {
            // Bug fix: the message previously ended with a stray, unbalanced
            // quote character ( + "\"" ), producing e.g.: ... with token abc"
            log.debug("Created requestitem_token " + requestItem.getID()
                    + " with token " + requestItem.getToken());
        }
        return requestItem.getToken();
    }

    /**
     * Look up a request by its unique token.
     *
     * @param context the current DSpace context
     * @param token the token handed out by {@link #createRequest}
     * @return the matching request, or {@code null} if none exists or a
     *         database error occurred (the error is logged, not rethrown)
     */
    @Override
    public RequestItem findByToken(Context context, String token) {
        try {
            return requestItemDAO.findByToken(context, token);
        } catch (SQLException e) {
            log.error(e.getMessage());
            return null;
        }
    }

    /**
     * Persist changes made to an existing request. Database errors are logged
     * and swallowed, matching the historic best-effort behaviour of this call.
     *
     * @param context the current DSpace context
     * @param requestItem the request whose state should be saved
     */
    @Override
    public void update(Context context, RequestItem requestItem) {
        try {
            requestItemDAO.save(context, requestItem);
        } catch (SQLException e) {
            log.error(e.getMessage());
        }
    }
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.dao;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import java.sql.SQLException;
/**
* Database Access Object interface class for the RequestItem object.
* The implementation of this class is responsible for all database calls for the RequestItem object and is autowired by spring
* This class should only be accessed from a single service & should never be exposed outside of the API
*
* @author kevinvandevelde at atmire.com
*/
public interface RequestItemDAO extends GenericDAO<RequestItem> {

    /**
     * Fetch the single item request identified by its unique access token.
     *
     * @param context the current DSpace context
     * @param token the unique token handed out when the request was created
     * @return the matching {@code RequestItem}, or {@code null} if none exists
     * @throws SQLException if a database error occurs
     */
    public RequestItem findByToken(Context context, String token) throws SQLException;
}

View File

@@ -0,0 +1,36 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.dao.impl;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.core.Context;
import org.dspace.core.AbstractHibernateDAO;
import org.hibernate.Criteria;
import org.hibernate.criterion.Restrictions;
import java.sql.SQLException;
/**
* Hibernate implementation of the Database Access Object interface class for the RequestItem object.
* This class is responsible for all database calls for the RequestItem object and is autowired by spring
* This class should never be accessed directly.
*
* @author kevinvandevelde at atmire.com
*/
public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implements RequestItemDAO {

    /**
     * Find the request whose token matches exactly. Tokens are declared
     * unique on the entity, so at most one row can match.
     */
    @Override
    public RequestItem findByToken(Context context, String token) throws SQLException {
        Criteria byToken = createCriteria(context, RequestItem.class)
                .add(Restrictions.eq("token", token));
        return uniqueResult(byToken);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.factory;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.handle.service.HandleService;
import org.dspace.utils.DSpace;
/**
* Abstract factory to get services for the requestitem package, use RequestItemServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public abstract class RequestItemServiceFactory {

    /**
     * @return the service handling business logic for item requests
     */
    public abstract RequestItemService getRequestItemService();

    /**
     * Look up the factory implementation registered with the DSpace service
     * manager under the bean name {@code requestItemServiceFactory}.
     *
     * @return the configured factory bean
     */
    public static RequestItemServiceFactory getInstance() {
        return new DSpace()
                .getServiceManager()
                .getServiceByName("requestItemServiceFactory", RequestItemServiceFactory.class);
    }
}

View File

@@ -0,0 +1,28 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.factory;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the requestitem package, use RequestItemServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public class RequestItemServiceFactoryImpl extends RequestItemServiceFactory {

    /** Injected by Spring: the RequestItemService bean. */
    @Autowired(required = true)
    private RequestItemService requestItemService;

    /** {@inheritDoc} */
    @Override
    public RequestItemService getRequestItemService() {
        return requestItemService;
    }
}

View File

@@ -0,0 +1,44 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.service;
import java.sql.SQLException;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Service interface class for the RequestItem object.
* The implementation of this class is responsible for all business logic calls for the RequestItem object and is autowired by spring
*
* @author kevinvandevelde at atmire.com
*/
public interface RequestItemService {

    /**
     * Generate a request item representing the request and put it into the DB
     *
     * @param context the current DSpace context
     * @param bitstream the bitstream being requested
     * @param item the item the bitstream belongs to
     * @param allFiles whether the request covers all files of the item
     *                 (flag is stored on the request; presumably interpreted
     *                 by the approval workflow — confirm against callers)
     * @param reqEmail e-mail address supplied by the requester
     * @param reqName name supplied by the requester
     * @param reqMessage free-text message supplied by the requester
     * @return the token of the request item
     * @throws java.sql.SQLException if a database error occurs
     */
    public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail, String reqName, String reqMessage)
            throws SQLException;

    /**
     * Look up a request by the unique token returned from
     * {@link #createRequest}.
     *
     * @param context the current DSpace context
     * @param token the request's unique token
     * @return the matching request, or null if none was found
     */
    public RequestItem findByToken(Context context, String token);

    /**
     * Save updates to the record. Only accept_request, and decision_date are set-able.
     *
     * @param context the current DSpace context
     * @param requestItem the request whose changes should be persisted
     */
    public void update(Context context, RequestItem requestItem);
}

View File

@@ -1,340 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sfx;
import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.dspace.content.DCPersonName;
import org.dspace.content.Metadatum;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* XML configuration file reader for DSpace metadata fields (DC) mapping
* to OpenURL parameters.
* <p>
* This class reads the [dspace]/config/sfx.xml configuration file, which
* contains pairs of DSpace item metadata values and OpenURL parameter names.
* Then it takes an item and constructs an OpenURL for it with values of
* parameters filled in from the paired metadata fields.
* </p>
*
* @author Stuart Lewis
* @author Graham Triggs
* @version $Revision$
*/
public class SFXFileReader {
/** The SFX configuration file */
private static Document doc;
/** log4j logger */
private static final Logger log = Logger.getLogger(SFXFileReader.class);
/**
* Loads the SFX configuration file
*
* @param fileName The name of the SFX configuration file
* @param item The item to process, from which metadata values will be taken
*
* @return the SFX string
* @throws IOException
*/
public static String loadSFXFile(String fileName, Item item) throws IOException
{
// Parse XML file -> XML document will be built
if (doc == null)
{
doc = parseFile(fileName);
}
// Return final sfx Query String
return doNodes(doc, item);
}
/** Parses XML file and returns XML document.
* @param fileName XML file to parse
* @return XML document or <B>null</B> if error occured. The error is caught and logged.
*/
public static Document parseFile(String fileName) {
log.info("Parsing XML file... " + fileName);
DocumentBuilder docBuilder;
Document doc = null;
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
docBuilderFactory.setIgnoringElementContentWhitespace(true);
try {
docBuilder = docBuilderFactory.newDocumentBuilder();
}
catch (ParserConfigurationException e) {
log.error("Wrong parser configuration: " + e.getMessage());
return null;
}
File sourceFile = new File(fileName);
try {
doc = docBuilder.parse(sourceFile);
}
catch (SAXException e) {
log.error("Wrong XML file structure: " + e.getMessage());
return null;
}
catch (IOException e) {
log.error("Could not read source file: " + e.getMessage());
}
log.info("XML file parsed");
return doc;
}
/**
* Process the item, mapping each of its metadata fields defined in the
* configuration file to an OpenURL parameter
*
* @param node DOM node of the mapping pair in the XML file (field element)
* @param item The item to process, from which metadata values will be taken
* @return processed fields.
* @throws IOException
*/
public static String doNodes(Node node, Item item) throws IOException
{
if (node == null)
{
log.error (" Empty Node ");
return null;
}
Node e = getElement(node);
NodeList nl = e.getChildNodes();
int len = nl.getLength();
String sfxfield = "";
int i = 0;
while ((i < len) && StringUtils.isEmpty(sfxfield))
{
Node nd = nl.item(i);
if ((nd == null) || isEmptyTextNode(nd))
{
i++;
continue;
}
String tagName = nd.getNodeName();
if (tagName.equals("query-pairs"))
{
sfxfield = processFields(nd, item);
}
i++;
}
log.info("Process fields : " + sfxfield);
return sfxfield;
}
/**
* Process the field nodes, mapping each metadata field defined in the
* configuration file to an OpenURL parameter
*
* @param e DOM node of the mapping pair in the XML file (field element)
* @param item The item to process, from which metadata values will be taken
* @return assembled OpenURL query.
* @throws IOException
*/
private static String processFields(Node e, Item item) throws IOException
{
NodeList cl = e.getChildNodes();
int lench = cl.getLength();
String myquery = "";
for (int j = 0; j < lench; j++)
{
Node nch = cl.item(j);
String querystring = "";
String schema = "";
String qualifier = "";
String element = "";
if (nch.getNodeName().equals("field"))
{
NodeList pl = nch.getChildNodes();
int plen = pl.getLength();
int finish = 0;
for (int k = 0; k < plen; k++)
{
Node vn= pl.item(k);
String vName = vn.getNodeName();
if (vName.equals("querystring"))
{
querystring = getValue(vn);
finish ++;
}
else if (vName.equals("dc-schema"))
{
schema = getValue(vn);
finish ++;
}
else if (vName.equals("dc-element"))
{
element = getValue(vn);
finish ++;
}
else if (vName.equals("dc-qualifier"))
{
qualifier = getValue(vn);
finish ++;
if (StringUtils.isEmpty(qualifier))
{
qualifier = null;
}
}
if (finish == 4)
{
Metadatum[] dcvalue = item.getMetadata(schema, element, qualifier, Item.ANY);
if (dcvalue.length > 0)
{
// Issued Date
if (element.equals("date") && qualifier.equals("issued"))
{
String fullDate = dcvalue[0].value;
// Remove the time if there is one - day is greatest granularity for SFX
if (fullDate.length() > 10)
{
fullDate = fullDate.substring(0, 10);
}
if (myquery.equals(""))
{ myquery = querystring + URLEncoder.encode(fullDate, Constants.DEFAULT_ENCODING); }
else
{ myquery = myquery + "&" + querystring + URLEncoder.encode(fullDate, Constants.DEFAULT_ENCODING); }
}
else
{
// Contributor Author
if (element.equals("contributor") && qualifier.equals("author"))
{
DCPersonName dpn = new DCPersonName(dcvalue[0].value);
String dpnName = dcvalue[0].value;
if (querystring.endsWith("aulast=")) { dpnName = dpn.getLastName(); }
else { if (querystring.endsWith("aufirst=")) { dpnName = dpn.getFirstNames(); }}
if (myquery.equals(""))
{ myquery = querystring + URLEncoder.encode(dpnName, Constants.DEFAULT_ENCODING); }
else
{ myquery = myquery + "&" + querystring + URLEncoder.encode(dpnName, Constants.DEFAULT_ENCODING); }
}
else
{
if (myquery.equals(""))
{ myquery = querystring + URLEncoder.encode(dcvalue[0].value, Constants.DEFAULT_ENCODING);}
else
{ myquery = myquery + "&" + querystring + URLEncoder.encode(dcvalue[0].value, Constants.DEFAULT_ENCODING);}
}
}
} // if dc.length > 0
finish = 0;
querystring = "";
schema = "";
element = "";
qualifier = "";
} // if finish == 4
} //for k
} // if field
} // for j
return myquery;
}
/** Returns element node
* @param node element (it is XML tag)
* @return Element node otherwise null
*/
public static Node getElement(Node node)
{
NodeList child = node.getChildNodes();
int length = child.getLength();
for (int i = 0; i < length; i++)
{
Node kid = child.item(i);
if (kid.getNodeType() == Node.ELEMENT_NODE)
{
return kid;
}
}
return null;
}
/** Is Empty text Node **/
public static boolean isEmptyTextNode(Node nd)
{
boolean isEmpty = false;
if (nd.getNodeType() == Node.TEXT_NODE)
{
String text = nd.getNodeValue().trim();
if (text.length() == 0)
{
isEmpty = true;
}
}
return isEmpty;
}
/**
* Returns the value of the node's attribute named <name>
**/
public static String getAttribute(Node e, String name)
{
NamedNodeMap attrs = e.getAttributes();
int len = attrs.getLength();
if (len > 0)
{
for (int i = 0; i < len; i++)
{
Node attr = attrs.item(i);
if (name.equals(attr.getNodeName()))
{
return attr.getNodeValue().trim();
}
}
}
//no such attribute
return null;
}
/**
* Returns the value found in the Text node (if any) in the
* node list that's passed in.
*/
public static String getValue(Node node)
{
NodeList child = node.getChildNodes();
for (int i = 0; i < child.getLength(); i++)
{
Node kid = child.item(i);
short type = kid.getNodeType();
if (type == Node.TEXT_NODE)
{
return kid.getNodeValue().trim();
}
}
// Didn't find a text node
return null;
}
}

View File

@@ -0,0 +1,290 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sfx;
import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.sfx.service.SFXFileReaderService;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.Document;
import org.dspace.content.DCPersonName;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* XML configuration file reader for DSpace metadata fields (DC) mapping
* to OpenURL parameters.
* <p>
* This class reads the [dspace]/config/sfx.xml configuration file, which
* contains pairs of DSpace item metadata values and OpenURL parameter names.
* Then it takes an item and constructs an OpenURL for it with values of
* parameters filled in from the paired metadata fields.
* </p>
*
* @author Stuart Lewis
* @author Graham Triggs
* @version $Revision$
*/
public class SFXFileReaderServiceImpl implements SFXFileReaderService {

    /**
     * The SFX configuration file, parsed once and cached.
     * NOTE(review): the cache is not synchronized; concurrent first calls may
     * each parse the file — confirm whether this bean is used from multiple
     * threads.
     */
    protected Document doc;

    @Autowired(required = true)
    protected ItemService itemService;

    /** log4j logger */
    private final Logger log = Logger.getLogger(SFXFileReaderServiceImpl.class);

    protected SFXFileReaderServiceImpl() {
    }

    @Override
    public String loadSFXFile(String fileName, Item item) throws IOException {
        // Parse XML file -> XML document will be built (only on first call;
        // the parsed document is cached for the lifetime of this bean)
        if (doc == null) {
            doc = parseFile(fileName);
        }
        // Return final sfx Query String
        return doNodes(doc, item);
    }

    @Override
    public Document parseFile(String fileName)
    {
        log.info("Parsing XML file... " + fileName);
        DocumentBuilder docBuilder;
        Document doc = null;
        DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        docBuilderFactory.setIgnoringElementContentWhitespace(true);
        // NOTE(review): DTDs / external entities are not disabled on this
        // parser. The input is a local, admin-controlled configuration file,
        // so this is probably acceptable — confirm it never receives
        // untrusted XML.
        try {
            docBuilder = docBuilderFactory.newDocumentBuilder();
        } catch (ParserConfigurationException e) {
            log.error("Wrong parser configuration: " + e.getMessage());
            return null;
        }
        File sourceFile = new File(fileName);
        try {
            doc = docBuilder.parse(sourceFile);
        } catch (SAXException e) {
            log.error("Wrong XML file structure: " + e.getMessage());
            return null;
        } catch (IOException e) {
            log.error("Could not read source file: " + e.getMessage());
        }
        log.info("XML file parsed");
        return doc;
    }

    @Override
    public String doNodes(Node node, Item item) throws IOException {
        if (node == null) {
            log.error(" Empty Node ");
            return null;
        }
        Node e = getElement(node);
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        String sfxfield = "";
        int i = 0;
        // Stop at the first <query-pairs> element that yields a non-empty query.
        while ((i < len) && StringUtils.isEmpty(sfxfield)) {
            Node nd = nl.item(i);
            if ((nd == null) || isEmptyTextNode(nd)) {
                i++;
                continue;
            }
            String tagName = nd.getNodeName();
            if (tagName.equals("query-pairs")) {
                sfxfield = processFields(nd, item);
            }
            i++;
        }
        log.info("Process fields : " + sfxfield);
        return sfxfield;
    }

    /**
     * Process the field nodes, mapping each metadata field defined in the
     * configuration file to an OpenURL parameter.
     *
     * @param e DOM node of the mapping pair in the XML file (field element)
     * @param item The item to process, from which metadata values will be taken
     * @return assembled OpenURL query.
     * @throws IOException if URL-encoding a value fails
     */
    protected String processFields(Node e, Item item) throws IOException {
        NodeList cl = e.getChildNodes();
        int lench = cl.getLength();
        String myquery = "";
        for (int j = 0; j < lench; j++) {
            Node nch = cl.item(j);
            String querystring = "";
            String schema = "";
            String qualifier = "";
            String element = "";
            if (nch.getNodeName().equals("field")) {
                NodeList pl = nch.getChildNodes();
                int plen = pl.getLength();
                // 'finish' counts how many of the four expected child tags
                // (querystring, dc-schema, dc-element, dc-qualifier) have been
                // seen; a query pair is emitted once all four are present.
                int finish = 0;
                for (int k = 0; k < plen; k++) {
                    Node vn = pl.item(k);
                    String vName = vn.getNodeName();
                    if (vName.equals("querystring")) {
                        querystring = getValue(vn);
                        finish++;
                    } else if (vName.equals("dc-schema")) {
                        schema = getValue(vn);
                        finish++;
                    } else if (vName.equals("dc-element")) {
                        element = getValue(vn);
                        finish++;
                    } else if (vName.equals("dc-qualifier")) {
                        qualifier = getValue(vn);
                        finish++;
                        // An empty <dc-qualifier/> means "no qualifier".
                        if (StringUtils.isEmpty(qualifier)) {
                            qualifier = null;
                        }
                    }
                    if (finish == 4) {
                        List<MetadataValue> dcvalue = itemService.getMetadata(item, schema, element, qualifier, Item.ANY);
                        if (dcvalue.size() > 0) {
                            // Issued Date
                            // Bug fix: compare with the literal first so a null
                            // qualifier no longer throws NullPointerException.
                            if (element.equals("date") && "issued".equals(qualifier)) {
                                String fullDate = dcvalue.get(0).getValue();
                                // Remove the time if there is one - day is greatest granularity for SFX
                                if (fullDate.length() > 10) {
                                    fullDate = fullDate.substring(0, 10);
                                }
                                if (myquery.equals("")) {
                                    myquery = querystring + URLEncoder.encode(fullDate, Constants.DEFAULT_ENCODING);
                                } else {
                                    myquery = myquery + "&" + querystring + URLEncoder.encode(fullDate, Constants.DEFAULT_ENCODING);
                                }
                            } else {
                                // Contributor Author
                                // Bug fix: same null-safe comparison as above.
                                if (element.equals("contributor") && "author".equals(qualifier)) {
                                    DCPersonName dpn = new DCPersonName(dcvalue.get(0).getValue());
                                    String dpnName = dcvalue.get(0).getValue();
                                    // The configured query string decides which
                                    // part of the personal name is emitted.
                                    if (querystring.endsWith("aulast=")) {
                                        dpnName = dpn.getLastName();
                                    } else {
                                        if (querystring.endsWith("aufirst=")) {
                                            dpnName = dpn.getFirstNames();
                                        }
                                    }
                                    if (myquery.equals("")) {
                                        myquery = querystring + URLEncoder.encode(dpnName, Constants.DEFAULT_ENCODING);
                                    } else {
                                        myquery = myquery + "&" + querystring + URLEncoder.encode(dpnName, Constants.DEFAULT_ENCODING);
                                    }
                                } else {
                                    if (myquery.equals("")) {
                                        myquery = querystring + URLEncoder.encode(dcvalue.get(0).getValue(), Constants.DEFAULT_ENCODING);
                                    } else {
                                        myquery = myquery + "&" + querystring + URLEncoder.encode(dcvalue.get(0).getValue(), Constants.DEFAULT_ENCODING);
                                    }
                                }
                            }
                        } // if dc.length > 0
                        finish = 0;
                        querystring = "";
                        schema = "";
                        element = "";
                        qualifier = "";
                    } // if finish == 4
                } //for k
            } // if field
        } // for j
        return myquery;
    }

    @Override
    public Node getElement(Node node) {
        NodeList child = node.getChildNodes();
        int length = child.getLength();
        for (int i = 0; i < length; i++) {
            Node kid = child.item(i);
            if (kid.getNodeType() == Node.ELEMENT_NODE) {
                return kid;
            }
        }
        return null;
    }

    @Override
    public boolean isEmptyTextNode(Node nd) {
        boolean isEmpty = false;
        if (nd.getNodeType() == Node.TEXT_NODE) {
            String text = nd.getNodeValue().trim();
            if (text.length() == 0) {
                isEmpty = true;
            }
        }
        return isEmpty;
    }

    @Override
    public String getAttribute(Node e, String name) {
        NamedNodeMap attrs = e.getAttributes();
        int len = attrs.getLength();
        if (len > 0) {
            for (int i = 0; i < len; i++) {
                Node attr = attrs.item(i);
                if (name.equals(attr.getNodeName())) {
                    return attr.getNodeValue().trim();
                }
            }
        }
        //no such attribute
        return null;
    }

    @Override
    public String getValue(Node node) {
        NodeList child = node.getChildNodes();
        for (int i = 0; i < child.getLength(); i++) {
            Node kid = child.item(i);
            short type = kid.getNodeType();
            if (type == Node.TEXT_NODE) {
                return kid.getNodeValue().trim();
            }
        }
        // Didn't find a text node
        return null;
    }
}

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sfx.factory;
import org.dspace.app.sfx.service.SFXFileReaderService;
import org.dspace.utils.DSpace;
/**
* Abstract factory to get services for the sfx package, use SfxServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public abstract class SfxServiceFactory {

    /**
     * @return the service used to read the SFX configuration file
     */
    public abstract SFXFileReaderService getSfxFileReaderService();

    /**
     * Look up the factory implementation registered with the DSpace service
     * manager under the bean name {@code sfxServiceFactory}.
     *
     * @return the configured factory bean
     */
    public static SfxServiceFactory getInstance() {
        return new DSpace()
                .getServiceManager()
                .getServiceByName("sfxServiceFactory", SfxServiceFactory.class);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sfx.factory;
import org.dspace.app.sfx.service.SFXFileReaderService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the sfx package, use SfxServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public class SfxServiceFactoryImpl extends SfxServiceFactory {

    /** Injected by Spring: the SFX file-reader service bean. */
    @Autowired(required = true)
    private SFXFileReaderService sfxFileReaderService;

    /** {@inheritDoc} */
    @Override
    public SFXFileReaderService getSfxFileReaderService() {
        return sfxFileReaderService;
    }
}

View File

@@ -0,0 +1,83 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sfx.service;
import org.dspace.content.Item;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import java.io.IOException;
/**
* XML configuration file reader for DSpace metadata fields (DC) mapping
* to OpenURL parameters.
* <p>
* This class reads the [dspace]/config/sfx.xml configuration file, which
* contains pairs of DSpace item metadata values and OpenURL parameter names.
* Then it takes an item and constructs an OpenURL for it with values of
* parameters filled in from the paired metadata fields.
* </p>
*
* @author Stuart Lewis
* @author Graham Triggs
* @version $Revision$
*/
public interface SFXFileReaderService {
/**
* Loads the SFX configuration file
*
* @param fileName The name of the SFX configuration file
* @param item The item to process, from which metadata values will be taken
*
* @return the SFX string
* @throws java.io.IOException
*/
public String loadSFXFile(String fileName, Item item) throws IOException;
/** Parses XML file and returns XML document.
* @param fileName XML file to parse
* @return XML document or <B>null</B> if error occurred. The error is caught and logged.
*/
public Document parseFile(String fileName);
/**
* Process the item, mapping each of its metadata fields defined in the
* configuration file to an OpenURL parameter
*
* @param node DOM node of the mapping pair in the XML file (field element)
* @param item The item to process, from which metadata values will be taken
* @return processed fields.
* @throws IOException
*/
public String doNodes(Node node, Item item) throws IOException;
/**
* Returns element node
*
* @param node element (it is XML tag)
* @return Element node otherwise null
*/
public Node getElement(Node node);
/**
* Is Empty text Node *
*/
public boolean isEmptyTextNode(Node nd);
/**
* Returns the value of the node's attribute named <name>
*/
public String getAttribute(Node e, String name);
/**
* Returns the value found in the Text node (if any) in the
* node list that's passed in.
*/
public String getValue(Node node);
}

View File

@@ -55,8 +55,6 @@ public class SHERPAResponse
Document inDoc = db.parse(xmlData);
Element xmlRoot = inDoc.getDocumentElement();
Element dataRoot = XMLUtils.getSingleElement(xmlRoot, "romeoapi");
Element headersElement = XMLUtils.getSingleElement(xmlRoot,
"header");
Element journalsElement = XMLUtils.getSingleElement(xmlRoot,

View File

@@ -26,7 +26,7 @@ public class SHERPAService
private int maxNumberOfTries;
private long sleepBetweenTimeouts;
private int timeout;
private int timeout = 5000;
/** log4j category */
private static final Logger log = Logger.getLogger(SHERPAService.class);

View File

@@ -10,7 +10,10 @@ package org.dspace.app.sherpa.submit;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
public interface ISSNItemExtractor
{

View File

@@ -10,12 +10,17 @@ package org.dspace.app.sherpa.submit;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
public class MetadataAuthorityISSNExtractor implements ISSNItemExtractor
{
@Autowired(required = true)
public ItemService itemService;
private List<String> metadataList;
public void setMetadataList(List<String> metadataList)
@@ -29,10 +34,10 @@ public class MetadataAuthorityISSNExtractor implements ISSNItemExtractor
List<String> values = new ArrayList<String>();
for (String metadata : metadataList)
{
Metadatum[] dcvalues = item.getMetadataByMetadataString(metadata);
for (Metadatum dcvalue : dcvalues)
List<MetadataValue> dcvalues = itemService.getMetadataByMetadataString(item, metadata);
for (MetadataValue dcvalue : dcvalues)
{
values.add(dcvalue.authority);
values.add(dcvalue.getAuthority());
}
}
return values;

View File

@@ -10,12 +10,17 @@ package org.dspace.app.sherpa.submit;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
public class MetadataValueISSNExtractor implements ISSNItemExtractor
{
@Autowired(required = true)
public ItemService itemService;
private List<String> metadataList;
public void setMetadataList(List<String> metadataList)
@@ -29,10 +34,10 @@ public class MetadataValueISSNExtractor implements ISSNItemExtractor
List<String> values = new ArrayList<String>();
for (String metadata : metadataList)
{
Metadatum[] dcvalues = item.getMetadataByMetadataString(metadata);
for (Metadatum dcvalue : dcvalues)
List<MetadataValue> dcvalues = itemService.getMetadataByMetadataString(item, metadata);
for (MetadataValue dcvalue : dcvalues)
{
values.add(dcvalue.value);
values.add(dcvalue.getValue());
}
}
return values;

View File

@@ -33,7 +33,7 @@ import java.util.zip.GZIPOutputStream;
* }
* g.finish();
* </pre>
*
*
* @author Robert Tansley
*/
public abstract class AbstractGenerator
@@ -59,7 +59,7 @@ public abstract class AbstractGenerator
/**
* Initialize this generator to write to the given directory. This must be
* called by any subclass constructor.
*
*
* @param outputDirIn
* directory to write sitemap files to
*/
@@ -73,7 +73,7 @@ public abstract class AbstractGenerator
/**
* Start writing a new sitemap file.
*
*
* @throws IOException
* if an error occurs creating the file
*/
@@ -97,7 +97,7 @@ public abstract class AbstractGenerator
/**
* Add the given URL to the sitemap.
*
*
* @param url
* Full URL to add
* @param lastMod
@@ -129,7 +129,7 @@ public abstract class AbstractGenerator
/**
* Finish with the current sitemap file.
*
*
* @throws IOException
* if an error occurs writing
*/
@@ -144,15 +144,18 @@ public abstract class AbstractGenerator
* Complete writing sitemap files and write the index files. This is invoked
* when all calls to {@link AbstractGenerator#addURL(String, Date)} have
* been completed, and invalidates the generator.
*
*
* @return number of sitemap files written.
*
*
* @throws IOException
* if an error occurs writing
*/
public int finish() throws IOException
{
closeCurrentFile();
if (null != currentOutput)
{
closeCurrentFile();
}
OutputStream fo = new FileOutputStream(new File(outputDir,
getIndexFilename()));
@@ -165,13 +168,13 @@ public abstract class AbstractGenerator
PrintStream out = new PrintStream(fo);
writeIndex(out, fileCount);
out.close();
return fileCount;
}
/**
* Return marked-up text to be included in a sitemap about a given URL.
*
*
* @param url
* URL to add information about
* @param lastMod
@@ -183,14 +186,14 @@ public abstract class AbstractGenerator
/**
* Return the boilerplate at the top of a sitemap file.
*
*
* @return The boilerplate markup.
*/
public abstract String getLeadingBoilerPlate();
/**
* Return the boilerplate at the end of a sitemap file.
*
*
* @return The boilerplate markup.
*/
public abstract String getTrailingBoilerPlate();
@@ -198,7 +201,7 @@ public abstract class AbstractGenerator
/**
* Return the maximum size in bytes that an individual sitemap file should
* be.
*
*
* @return the size in bytes.
*/
public abstract int getMaxSize();
@@ -206,7 +209,7 @@ public abstract class AbstractGenerator
/**
* Return the maximum number of URLs that an individual sitemap file should
* contain.
*
*
* @return the maximum number of URLs.
*/
public abstract int getMaxURLs();
@@ -214,7 +217,7 @@ public abstract class AbstractGenerator
/**
* Return whether the written sitemap files and index should be
* GZIP-compressed.
*
*
* @return {@code true} if GZIP compression should be used, {@code false}
* otherwise.
*/
@@ -222,7 +225,7 @@ public abstract class AbstractGenerator
/**
* Return the filename a sitemap at the given index should be stored at.
*
*
* @param number
* index of the sitemap file (zero is first).
* @return the filename to write the sitemap to.
@@ -231,14 +234,14 @@ public abstract class AbstractGenerator
/**
* Get the filename the index should be written to.
*
*
* @return the filename of the index.
*/
public abstract String getIndexFilename();
/**
* Write the index file.
*
*
* @param output
* stream to write the index to
* @param sitemapCount

View File

@@ -18,6 +18,8 @@ import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -30,7 +32,10 @@ import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
@@ -46,6 +51,10 @@ public class GenerateSitemaps
/** Logger */
private static Logger log = Logger.getLogger(GenerateSitemaps.class);
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
public static void main(String[] args) throws Exception
{
final String usage = GenerateSitemaps.class.getCanonicalName();
@@ -178,88 +187,71 @@ public class GenerateSitemaps
Context c = new Context();
Community[] comms = Community.findAll(c);
List<Community> comms = communityService.findAll(c);
for (int i = 0; i < comms.length; i++)
{
String url = handleURLStem + comms[i].getHandle();
for (Community comm : comms) {
String url = handleURLStem + comm.getHandle();
if (makeHTMLMap)
{
if (makeHTMLMap) {
html.addURL(url, null);
}
if (makeSitemapOrg)
{
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
}
Collection[] colls = Collection.findAll(c);
List<Collection> colls = collectionService.findAll(c);
for (int i = 0; i < colls.length; i++)
{
String url = handleURLStem + colls[i].getHandle();
for (Collection coll : colls) {
String url = handleURLStem + coll.getHandle();
if (makeHTMLMap)
{
if (makeHTMLMap) {
html.addURL(url, null);
}
if (makeSitemapOrg)
{
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
}
ItemIterator allItems = Item.findAll(c);
try
Iterator<Item> allItems = itemService.findAll(c);
int itemCount = 0;
while (allItems.hasNext())
{
int itemCount = 0;
while (allItems.hasNext())
{
Item i = allItems.next();
String url = handleURLStem + i.getHandle();
Date lastMod = i.getLastModified();
if (makeHTMLMap)
{
html.addURL(url, lastMod);
}
if (makeSitemapOrg)
{
sitemapsOrg.addURL(url, lastMod);
}
i.decache();
itemCount++;
}
Item i = allItems.next();
String url = handleURLStem + i.getHandle();
Date lastMod = i.getLastModified();
if (makeHTMLMap)
{
int files = html.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.length + ",collections=" + colls.length
+ ",items=" + itemCount));
html.addURL(url, lastMod);
}
if (makeSitemapOrg)
{
int files = sitemapsOrg.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.length + ",collections=" + colls.length
+ ",items=" + itemCount));
sitemapsOrg.addURL(url, lastMod);
}
itemCount++;
}
finally
if (makeHTMLMap)
{
if (allItems != null)
{
allItems.close();
}
int files = html.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.size() + ",collections=" + colls.size()
+ ",items=" + itemCount));
}
if (makeSitemapOrg)
{
int files = sitemapsOrg.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.size() + ",collections=" + colls.size()
+ ",items=" + itemCount));
}
c.abort();
}

View File

@@ -23,10 +23,10 @@ import java.util.Date;
public class HTMLSitemapGenerator extends AbstractGenerator
{
/** Stem of URLs sitemaps will eventually appear at */
private String indexURLStem;
protected String indexURLStem;
/** Tail of URLs sitemaps will eventually appear at */
private String indexURLTail;
protected String indexURLTail;
/**
* Construct an HTML sitemap generator, writing files to the given
@@ -50,33 +50,39 @@ public class HTMLSitemapGenerator extends AbstractGenerator
indexURLTail = (urlTail == null ? "" : urlTail);
}
@Override
public String getFilename(int number)
{
return "sitemap" + number + ".html";
}
@Override
public String getLeadingBoilerPlate()
{
return "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">\n"
+ "<html><head><title>URL List</title></head><body><ul>";
}
@Override
public int getMaxSize()
{
// 50k
return 51200;
}
@Override
public int getMaxURLs()
{
return 1000;
}
@Override
public String getTrailingBoilerPlate()
{
return "</ul></body></html>\n";
}
@Override
public String getURLText(String url, Date lastMod)
{
StringBuffer urlText = new StringBuffer();
@@ -87,16 +93,19 @@ public class HTMLSitemapGenerator extends AbstractGenerator
return urlText.toString();
}
@Override
public boolean useCompression()
{
return false;
}
@Override
public String getIndexFilename()
{
return "sitemap_index.html";
}
@Override
public void writeIndex(PrintStream output, int sitemapCount)
throws IOException
{

View File

@@ -25,13 +25,13 @@ import java.util.Date;
public class SitemapsOrgGenerator extends AbstractGenerator
{
/** Stem of URLs sitemaps will eventually appear at */
private String indexURLStem;
protected String indexURLStem;
/** Tail of URLs sitemaps will eventually appear at */
private String indexURLTail;
protected String indexURLTail;
/** The correct date format */
private DateFormat w3dtfFormat = new SimpleDateFormat(
protected DateFormat w3dtfFormat = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'");
/**
@@ -56,33 +56,39 @@ public class SitemapsOrgGenerator extends AbstractGenerator
indexURLTail = (urlTail == null ? "" : urlTail);
}
@Override
public String getFilename(int number)
{
return "sitemap" + number + ".xml.gz";
}
@Override
public String getLeadingBoilerPlate()
{
return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ "<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">";
}
@Override
public int getMaxSize()
{
// 10 Mb
return 10485760;
}
@Override
public int getMaxURLs()
{
return 50000;
}
@Override
public String getTrailingBoilerPlate()
{
return "</urlset>";
}
@Override
public String getURLText(String url, Date lastMod)
{
StringBuffer urlText = new StringBuffer();
@@ -98,16 +104,19 @@ public class SitemapsOrgGenerator extends AbstractGenerator
return urlText.toString();
}
@Override
public boolean useCompression()
{
return true;
}
@Override
public String getIndexFilename()
{
return "sitemap_index.xml.gz";
}
@Override
public void writeIndex(PrintStream output, int sitemapCount)
throws IOException
{

View File

@@ -69,6 +69,7 @@ public class HTMLReport implements Report
*
* @return the HTML report
*/
@Override
public String render()
{
StringBuffer frag = new StringBuffer();
@@ -165,6 +166,7 @@ public class HTMLReport implements Report
*
* @param stat the statistics object to be added to the report
*/
@Override
public void addBlock(Statistics stat)
{
blocks.add(stat);
@@ -177,6 +179,7 @@ public class HTMLReport implements Report
*
* @param start the start date for the report
*/
@Override
public void setStartDate(Date start)
{
this.start = (start == null ? null : new Date(start.getTime()));
@@ -188,6 +191,7 @@ public class HTMLReport implements Report
*
* @param end the end date for the report
*/
@Override
public void setEndDate(Date end)
{
this.end = (end == null ? null : new Date(end.getTime()));
@@ -200,6 +204,7 @@ public class HTMLReport implements Report
*
* @return a string containing date range information
*/
@Override
public String dateRange()
{
StringBuffer frag = new StringBuffer();
@@ -238,6 +243,7 @@ public class HTMLReport implements Report
*
* @return a string containing the title of the report
*/
@Override
public String mainTitle()
{
return "<div class=\"reportTitle\"><a name=\"top\">" + mainTitle + "</a></div>\n\n";
@@ -250,6 +256,7 @@ public class HTMLReport implements Report
* @param name the name of the service
* @param serverName the name of the server
*/
@Override
public void setMainTitle(String name, String serverName)
{
mainTitle = "Statistics for " + name + " on " + serverName;
@@ -266,6 +273,7 @@ public class HTMLReport implements Report
*
* @return a string containing the header for the report
*/
@Override
public String header()
{
return header("");
@@ -277,6 +285,7 @@ public class HTMLReport implements Report
*
* @param title the title of the item being headered
*/
@Override
public String header(String title)
{
// FIXME: this need to be figured out to integrate nicely into the
@@ -309,6 +318,7 @@ public class HTMLReport implements Report
*
* @return a string containing the section title HTML formatted
*/
@Override
public String sectionHeader(String title)
{
// prepare the title to be an <a name="#title"> style link
@@ -332,6 +342,7 @@ public class HTMLReport implements Report
*
* @return a string containing the statistics block HTML formatted
*/
@Override
public String statBlock(Statistics content)
{
StringBuffer frag = new StringBuffer();
@@ -417,6 +428,7 @@ public class HTMLReport implements Report
*
* @return a string containing floor information HTML formatted
*/
@Override
public String floorInfo(int floor)
{
if (floor > 0)
@@ -440,6 +452,7 @@ public class HTMLReport implements Report
*
* @return a string containing an explanaton HTML formatted
*/
@Override
public String blockExplanation(String explanation)
{
if (explanation != null)
@@ -461,6 +474,7 @@ public class HTMLReport implements Report
*
* @return a string containing the report footer
*/
@Override
public String footer()
{
return "";

View File

@@ -7,12 +7,14 @@
*/
package org.dspace.app.statistics;
import org.dspace.content.MetadataSchema;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import java.sql.SQLException;
@@ -291,8 +293,7 @@ public class LogAnalyser
String myFileTemplate, String myConfigFile,
String myOutFile, Date myStartDate,
Date myEndDate, boolean myLookUp)
throws IOException, SQLException
{
throws IOException, SQLException, SearchServiceException {
// FIXME: perhaps we should have all parameters and aggregators put
// together in a single aggregating object
@@ -1141,132 +1142,42 @@ public class LogAnalyser
* @return an integer containing the relevant count
*/
public static Integer getNumItems(Context context, String type)
throws SQLException
{
boolean oracle = DatabaseManager.isOracle();
throws SQLException, SearchServiceException {
// FIXME: this method is clearly not optimised
// FIXME: we don't yet collect total statistics, such as number of items
// withdrawn, number in process of submission etc. We should probably do
// that
// start the type constraint
String typeQuery = null;
if (type != null)
DiscoverQuery discoverQuery = new DiscoverQuery();
if(StringUtils.isNotBlank(type))
{
typeQuery = "SELECT item_id " +
"FROM metadatavalue " +
"WHERE text_value LIKE '%" + type + "%' " +
"AND metadata_field_id = (" +
" SELECT metadata_field_id " +
" FROM metadatafieldregistry " +
" WHERE metadata_schema_id = (" +
" SELECT metadata_schema_id" +
" FROM MetadataSchemaRegistry" +
" WHERE short_id = '" + MetadataSchema.DC_SCHEMA + "')" +
" AND element = 'type' " +
" AND qualifier IS NULL) ";
discoverQuery.addFilterQueries("dc.type=" + type +"*");
}
// start the date constraint query buffer
StringBuffer dateQuery = new StringBuffer();
if (oracle)
StringBuilder accessionedQuery = new StringBuilder();
accessionedQuery.append("dc.date.accessioned_dt:[");
if(startDate != null)
{
dateQuery.append("SELECT /*+ ORDERED_PREDICATES */ item_id ");
accessionedQuery.append(unParseDate(startDate));
}
else
{
dateQuery.append("SELECT item_id ");
accessionedQuery.append("*");
}
dateQuery.append("FROM metadatavalue " +
"WHERE metadata_field_id = (" +
" SELECT metadata_field_id " +
" FROM metadatafieldregistry " +
" WHERE metadata_schema_id = (" +
" SELECT metadata_schema_id" +
" FROM MetadataSchemaRegistry" +
" WHERE short_id = '" + MetadataSchema.DC_SCHEMA + "')" +
" AND element = 'date' " +
" AND qualifier = 'accessioned') ");
// Verifies that the metadata contains a valid date, otherwise the
// postgres queries blow up when doing the ::timestamp cast.
if (!oracle && (startDate != null || endDate != null)) {
dateQuery.append(" AND text_value LIKE '____-__-__T__:__:__Z' ");
}
if (startDate != null)
accessionedQuery.append(" TO ");
if(endDate != null)
{
if (oracle)
{
dateQuery.append(" AND TO_TIMESTAMP( TO_CHAR(text_value), "+
"'yyyy-mm-dd\"T\"hh24:mi:ss\"Z\"' ) >= TO_DATE('" +
unParseDate(startDate) + "', 'yyyy-MM-dd\"T\"hh24:mi:ss\"Z\"') ");
}
else
{
dateQuery.append(" AND text_value::timestamp >= '" +
unParseDate(startDate) + "'::timestamp ");
}
}
if (endDate != null)
{
// adjust end date to account for timestamp comparison
GregorianCalendar realEndDate = new GregorianCalendar();
realEndDate.setTime(endDate);
realEndDate.add(Calendar.DAY_OF_MONTH, 1);
Date queryEndDate = realEndDate.getTime();
if (oracle)
{
dateQuery.append(" AND TO_TIMESTAMP( TO_CHAR(text_value), "+
"'yyyy-mm-dd\"T\"hh24:mi:ss\"Z\"' ) < TO_DATE('" +
unParseDate(queryEndDate) + "', 'yyyy-MM-dd\"T\"hh24:mi:ss\"Z\"') ");
}
else
{
dateQuery.append(" AND text_value::timestamp < '" +
unParseDate(queryEndDate) + "'::timestamp ");
}
}
// build the final query
StringBuffer query = new StringBuffer();
query.append("SELECT COUNT(*) AS num " +
"FROM item " +
"WHERE in_archive = " + (oracle ? "1 " : "true ") +
"AND withdrawn = " + (oracle ? "0 " : "false "));
if (startDate != null || endDate != null)
{
query.append(" AND item_id IN ( " +
dateQuery.toString() + ") ");
}
if (type != null)
{
query.append(" AND item_id IN ( " +
typeQuery + ") ");
}
TableRow row = DatabaseManager.querySingle(context, query.toString());
Integer numItems;
if (oracle)
{
numItems = Integer.valueOf(row.getIntColumn("num"));
accessionedQuery.append(unParseDate(endDate));
}
else
{
// for some reason the number column is of "long" data type!
Long count = Long.valueOf(row.getLongColumn("num"));
numItems = Integer.valueOf(count.intValue());
accessionedQuery.append("*");
}
return numItems;
discoverQuery.addFilterQueries(accessionedQuery.toString());
discoverQuery.addFilterQueries("withdrawn: false");
discoverQuery.addFilterQueries("archived: true");
return SearchUtils.getSearchService().search(context, discoverQuery).getMaxResults();
}
@@ -1280,8 +1191,7 @@ public class LogAnalyser
* archive
*/
public static Integer getNumItems(Context context)
throws SQLException
{
throws SQLException, SearchServiceException {
return getNumItems(context, null);
}

View File

@@ -27,11 +27,15 @@ import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* This class performs the action of coordinating a usage report being
@@ -140,7 +144,10 @@ public class ReportGenerator
/** the log file action to human readable action map */
private static String map = ConfigurationManager.getProperty("dspace.dir") +
File.separator + "config" + File.separator + "dstat.map";
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
/**
* main method to be run from command line. See usage information for
@@ -785,7 +792,7 @@ public class ReportGenerator
// ensure that the handle exists
try
{
item = (Item) HandleManager.resolveToObject(context, handle);
item = (Item) handleService.resolveToObject(context, handle);
}
catch (Exception e)
{
@@ -801,24 +808,24 @@ public class ReportGenerator
// build the referece
// FIXME: here we have blurred the line between content and presentation
// and it should probably be un-blurred
Metadatum[] title = item.getDC("title", null, Item.ANY);
Metadatum[] author = item.getDC("contributor", "author", Item.ANY);
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
List<MetadataValue> author = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "contributor", "author", Item.ANY);
StringBuffer authors = new StringBuffer();
if (author.length > 0)
if (author.size() > 0)
{
authors.append("(" + author[0].value);
authors.append("(" + author.get(0).getValue());
}
if (author.length > 1)
if (author.size() > 1)
{
authors.append(" et al");
}
if (author.length > 0)
if (author.size() > 0)
{
authors.append(")");
}
String content = title[0].value + " " + authors.toString();
String content = title.get(0).getValue() + " " + authors.toString();
return content;
}

View File

@@ -143,7 +143,8 @@ public class Stat implements Comparable
* @return +1, 0, -1 if o is less than, equal to, or greater than the
* current object value.
*/
public int compareTo(Object o)
@Override
public int compareTo(Object o)
{
int objectValue = ((Stat) o).getValue();

View File

@@ -101,6 +101,7 @@ public class StatisticsLoader
protected static Date[] sortDatesDescending(Date[] dates)
{
Arrays.sort(dates, new Comparator<Date>() {
@Override
public int compare(Date d1, Date d2)
{
if (d1 == null && d2 == null)
@@ -352,6 +353,7 @@ public class StatisticsLoader
*/
private static class AnalysisAndReportFilter implements FilenameFilter
{
@Override
public boolean accept(File dir, String name)
{
if (analysisMonthlyPattern.matcher(name).matches())

Some files were not shown because too many files have changed in this diff Show More