Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 01:54:22 +00:00)

Compare commits: 6ac823d29a ... dspace-5.5 (214 commits)
| SHA1 |
| --- |
| 132f37a10a |
| 98a26fa3e7 |
| 4f5f5acdbe |
| 212011cc75 |
| e7b49d8310 |
| a70f0bdd22 |
| a84763a258 |
| 5a1028a7a9 |
| 16b123e9df |
| f057ed8c07 |
| 875bb59eb0 |
| 2c09aea8fd |
| 533245c8dd |
| 875bba3add |
| 55e623d1c2 |
| 3ff604742b |
| 3bfe7b8ea8 |
| ee62f9d6f0 |
| be35b0450b |
| 8c94edc29c |
| 2bf0275678 |
| 86ca33eaa3 |
| f64d4b3367 |
| c908997900 |
| e2dd1089c9 |
| 8809150e66 |
| 1fd2723848 |
| 454f40b3f4 |
| f05c9e794f |
| 56fc41cac3 |
| 0175e5edff |
| d17886c1cd |
| 06668c363e |
| 4b3a07120c |
| 50c4a54bd6 |
| 0aabf5d780 |
| 04ce6ff2f4 |
| 1f8f6241c2 |
| 4a2f392ed8 |
| fac705ec3f |
| e1263249f5 |
| 553b1a72c5 |
| 6242865207 |
| 59fa31641a |
| 58344b610f |
| 563d90f7c4 |
| 131555604a |
| fbde108024 |
| 2c59a9dd35 |
| d307c56d07 |
| 1d2b954889 |
| 69cfc61167 |
| b944ceb112 |
| 9885ed851a |
| 52ce1eb52b |
| deeef45943 |
| ad21875ac8 |
| 4ee79a3d89 |
| c01c3af153 |
| f493a475fd |
| a3a5f562c9 |
| 3479b0a254 |
| 39289b6762 |
| edf7ea6524 |
| 2045fee8ab |
| bac9beaffa |
| 569ad5f546 |
| b465f26646 |
| ad19c3aeb6 |
| 34c20d49ad |
| eaa08adb62 |
| 15f3c247bc |
| 2a44765f39 |
| 87c34f1f1c |
| fce84880bc |
| 50cb865ea2 |
| a9b8d8bfbc |
| 600f680cd6 |
| 01d7d060d7 |
| 4a6663c2f4 |
| b3c87b2be7 |
| ac08b6a4e3 |
| a2f5fe34eb |
| ace19199e5 |
| 6d9fa26535 |
| 3efe549774 |
| 734744ec4f |
| 829c30bab4 |
| 83cb04ed53 |
| 0911d60290 |
| 9bb7036857 |
| e0368f3ade |
| 660217c3f9 |
| 5f13b8cc64 |
| a2caabc79a |
| cb9710cda4 |
| 56abebaece |
| 0310db74aa |
| 3e1bac69df |
| ec86af5a82 |
| 79e111996b |
| f4c6f2680c |
| f3487be040 |
| 87d0770974 |
| 1c9fa656aa |
| 59ff964f4f |
| 10c4661885 |
| afe9c1294f |
| 7a54972ed1 |
| b2cb0ef4dd |
| 5edf641d6c |
| d9b14a86f0 |
| 7b8fa49632 |
| b5540d5999 |
| 494ff0c4c1 |
| 1c4c8943a9 |
| 5cd56fb834 |
| ed89d6b00e |
| 19b28f4734 |
| 4a8fdf6843 |
| d040b9dd4e |
| 4036bf781a |
| d011e24f74 |
| 0e9f78e9df |
| 254097b2e2 |
| 8049cef23b |
| de842dbf30 |
| 8bcac58154 |
| 511b78277f |
| dbd019943a |
| 7d8a9d5636 |
| 2ab6b10a03 |
| cd7789e8df |
| 9287aa891f |
| a99203382c |
| 6ec649df78 |
| e9f4e4c2cc |
| 18cc6bb3ff |
| 8094d8fe18 |
| b7a469d53c |
| f168c6c33d |
| 981b62d9e9 |
| 2c42d71a6a |
| ca6bc57c6d |
| 0f0be17d0a |
| 5e5a7922d0 |
| bb4cb39373 |
| a257f516fa |
| 9d8284d85f |
| 57efa4f628 |
| 5b5f44085a |
| 46ce2741bc |
| 0b799fc882 |
| 04b57a60b3 |
| 02b4314046 |
| 3d79fa76ab |
| ca1803ae93 |
| 9046ec21d4 |
| b30654e3d5 |
| ee19e11e6d |
| a990c97959 |
| 56816b13ba |
| b414aaa195 |
| 1a1ae35ec9 |
| 1029f393e4 |
| c1039dfe26 |
| cc96646e37 |
| d2ad7c81de |
| 00e9c1131f |
| 77cc9abe49 |
| 91018bfe0f |
| 7f9bcb283f |
| ae11c1c795 |
| 9cd5fa596b |
| e10b10224a |
| e08886ae09 |
| df3ffcf7f9 |
| 0c77f7be91 |
| cdc8e3144e |
| 92847079d7 |
| b023c36941 |
| aee3b0b710 |
| d0c8afb601 |
| e9c14bbcea |
| 2eca19daa3 |
| bcc7a75baa |
| 19222e9341 |
| 8124a61738 |
| 09007146d0 |
| e715c64404 |
| 53ff4510ac |
| 495031001d |
| 97e89384f1 |
| 72913cda76 |
| 03097aaa35 |
| f6d3f67b52 |
| 62e0ac462e |
| 54310b014b |
| beaf54f624 |
| 114f1e0985 |
| 1fdfe05c4c |
| 9c1f91d40b |
| 39711b332f |
| 6cfda147b4 |
| eabdc610a0 |
| da74f5aa7e |
| 14c575a7c4 |
| d8c8d28c13 |
| bf56f1f7e3 |
| 8046d154ee |
| 589117e204 |
| e9e5423f97 |
| c08f447cec |
| cf25175155 |
@@ -1,4 +1,5 @@
 language: java
+sudo: false
 
 env:
   # Give Maven 1GB of memory to work with
LICENSE
@@ -1,7 +1,7 @@
 DSpace source code license:
 
-Copyright (c) 2002-2013, DuraSpace. All rights reserved.
+Copyright (c) 2002-2015, DuraSpace. All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are
@@ -266,22 +266,24 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
     * Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
     * Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
     * databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
-    * DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
-    * DSpace JSP-UI (org.dspace:dspace-jspui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
-    * DSpace OAI-PMH (org.dspace:dspace-oai:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
-    * DSpace RDF (org.dspace:dspace-rdf:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
-    * DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.0-rc4-SNAPSHOT - http://demo.dspace.org)
-    * DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
-    * Apache Solr Webapp (org.dspace:dspace-solr:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
-    * DSpace SWORD (org.dspace:dspace-sword:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
-    * DSpace SWORD v2 (org.dspace:dspace-swordv2:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
-    * DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
+    * DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
+    * DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.4 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
+    * DSpace JSP-UI (org.dspace:dspace-jspui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
+    * DSpace OAI-PMH (org.dspace:dspace-oai:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
+    * DSpace RDF (org.dspace:dspace-rdf:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
+    * DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.3-SNAPSHOT - http://demo.dspace.org)
+    * DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
+    * Apache Solr Webapp (org.dspace:dspace-solr:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
+    * DSpace SWORD (org.dspace:dspace-sword:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
+    * DSpace SWORD v2 (org.dspace:dspace-swordv2:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
+    * DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
+    * DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.5 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
     * handle (org.dspace:handle:6.2 - no url defined)
     * jargon (org.dspace:jargon:1.4.25 - no url defined)
     * mets (org.dspace:mets:1.5.2 - no url defined)
     * oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
     * Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
-    * DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
+    * DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.3-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
     * Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
     * Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
     * JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
@@ -386,8 +388,3 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
     * Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined)
     * Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
     * XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)
-
-Unknown license:
-
-    * DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
-    * DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
@@ -66,14 +66,12 @@ db.password=dspace
 #db.username=dspace
 #db.password=dspace
 
-# Schema name - if your database contains multiple schemas, you can avoid problems with
-# retrieving the definitions of duplicate object names by specifying
-# the schema name here that is used for DSpace by uncommenting the following entry
-
-# NOTE: this configuration option is for PostgreSQL only. For Oracle, schema is equivalent
-# to user name. DSpace depends on the PostgreSQL understanding of schema. If you are using
-# Oracle, just leave this this value blank.
-
+# Schema name - if your database contains multiple schemas, you can avoid
+# problems with retrieving the definitions of duplicate object names by
+# specifying the schema name that is used for DSpace.
+# ORACLE USAGE NOTE: In Oracle, schema is equivalent to "username". This means
+# specifying a "db.schema" is often unnecessary (i.e. you can leave it blank),
+# UNLESS your Oracle DB Account (in db.username) has access to multiple schemas.
 db.schema =
 
 # Maximum number of DB connections in pool
@@ -12,7 +12,7 @@
 <parent>
    <groupId>org.dspace</groupId>
    <artifactId>dspace-parent</artifactId>
-   <version>5.0</version>
+   <version>5.5</version>
    <relativePath>..</relativePath>
 </parent>
@@ -603,13 +603,21 @@
    <groupId>com.google.apis</groupId>
    <artifactId>google-api-services-analytics</artifactId>
 </dependency>
+<dependency>
+   <groupId>com.google.api-client</groupId>
+   <artifactId>google-api-client</artifactId>
+</dependency>
+<dependency>
+   <groupId>com.google.http-client</groupId>
+   <artifactId>google-http-client</artifactId>
+</dependency>
+<dependency>
+   <groupId>com.google.http-client</groupId>
+   <artifactId>google-http-client-jackson2</artifactId>
+</dependency>
 <dependency>
    <groupId>com.google.oauth-client</groupId>
-   <artifactId>google-oauth-client-jetty</artifactId>
+   <artifactId>google-oauth-client</artifactId>
 </dependency>
 <!-- FindBugs -->
 <dependency>
@@ -1169,10 +1169,8 @@ public class MetadataImport
      */
     private static boolean isAuthorityControlledField(String md)
     {
-        int pos = md.indexOf("[");
-        String mdf = (pos > -1 ? md.substring(0, pos) : md);
-        pos = md.indexOf(":");
-        mdf = (pos > -1 ? md.substring(pos+1) : md);
+        String mdf = StringUtils.substringAfter(md, ":");
+        mdf = StringUtils.substringBefore(mdf, "[");
        return authorityControlled.contains(mdf);
     }
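The replacement above leans on two Apache Commons Lang helpers whose edge-case behaviour is easy to forget. A minimal, standalone sketch of that behaviour, assuming Commons Lang is on the classpath (DSpace 5 uses the 2.x `org.apache.commons.lang` package; the sample inputs are hypothetical, not DSpace CSV headers):

```java
import org.apache.commons.lang.StringUtils;

public class SubstringDemo {
    public static void main(String[] args) {
        // substringAfter returns everything after the FIRST separator,
        // or "" when the separator is absent.
        System.out.println(StringUtils.substringAfter("a:b:c", ":")); // "b:c"
        System.out.println(StringUtils.substringAfter("abc", ":"));   // ""

        // substringBefore returns everything before the FIRST separator,
        // or the whole input when the separator is absent.
        System.out.println(StringUtils.substringBefore("b[en]", "[")); // "b"
        System.out.println(StringUtils.substringBefore("b", "["));     // "b"
    }
}
```

The null-safe, separator-absent defaults are what let the two-line version replace the four lines of manual `indexOf`/`substring` bookkeeping (which, as the removed code shows, also overwrote `mdf` and discarded its first computation).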
@@ -11,6 +11,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.List;
+import java.util.TreeMap;
 import org.dspace.core.ConfigurationManager;
 import org.dspace.servicemanager.DSpaceKernelImpl;
 import org.dspace.servicemanager.DSpaceKernelInit;
@@ -275,9 +276,21 @@ public class ScriptLauncher
      */
     private static void display()
     {
+        // List all command elements
         List<Element> commands = commandConfigs.getRootElement().getChildren("command");
-        System.out.println("Usage: dspace [command-name] {parameters}");
-        for (Element command : commands)
+
+        // Sort the commands by name.
+        // We cannot just use commands.sort() because it tries to remove and
+        // reinsert Elements within other Elements, and that doesn't work.
+        TreeMap<String, Element> sortedCommands = new TreeMap<>();
+        for (Element command : commands)
+        {
+            sortedCommands.put(command.getChild("name").getValue(), command);
+        }
+
+        // Display the sorted list
+        System.out.println("Usage: dspace [command-name] {parameters}");
+        for (Element command : sortedCommands.values())
         {
             System.out.println(" - " + command.getChild("name").getValue() +
                     ": " + command.getChild("description").getValue());
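The pattern used here is worth noting: rather than sorting the list in place, items are collected into a `TreeMap` keyed by the sort field, which matters when the elements belong to a structure (JDOM `Element`s attached to a parent document) that does not tolerate removal and reinsertion. A generic sketch with hypothetical data:

```java
import java.util.Arrays;
import java.util.List;
import java.util.TreeMap;

public class SortByKeyDemo {
    public static void main(String[] args) {
        // Hypothetical command names; stand-ins for JDOM Elements that
        // cannot be reordered inside their parent document.
        List<String> commands = Arrays.asList("harvest", "cleanup", "index");

        // TreeMap iterates entries in ascending key order, so inserting
        // everything keyed by name yields a sorted view without ever
        // mutating the source list.
        TreeMap<String, String> sorted = new TreeMap<>();
        for (String name : commands) {
            sorted.put(name, name.toUpperCase()); // value = the "element"
        }
        sorted.values().forEach(System.out::println); // CLEANUP, HARVEST, INDEX
    }
}
```

One caveat inherited by the real code: a `TreeMap` keeps one value per key, so two commands with the same name would collapse to one entry.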
@@ -7,9 +7,10 @@
  */
 package org.dspace.app.mediafilter;
 
+import java.io.ByteArrayInputStream;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.InputStream;
+import java.nio.file.Files;
 
 
 /**
@@ -30,9 +31,24 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
             throws Exception
     {
         File f = inputStreamToTempFile(source, "imthumb", ".tmp");
-        File f2 = getThumbnailFile(f);
-        return new FileInputStream(f2);
+        File f2 = null;
+        try
+        {
+            f2 = getThumbnailFile(f);
+            byte[] bytes = Files.readAllBytes(f2.toPath());
+            return new ByteArrayInputStream(bytes);
+        }
+        finally
+        {
+            //noinspection ResultOfMethodCallIgnored
+            f.delete();
+            if (f2 != null)
+            {
+                //noinspection ResultOfMethodCallIgnored
+                f2.delete();
+            }
+        }
     }
 
 }
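The new body buffers the thumbnail into memory and deletes both temp files in a `finally` block, so cleanup happens whether or not `getThumbnailFile` throws; the old code returned a `FileInputStream` over a temp file that was never deleted. A self-contained sketch of the same pattern, with the transform step reduced to a placeholder copy (all names hypothetical):

```java
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

public class TempFileCleanupDemo {
    public static InputStream process(InputStream source) throws Exception {
        // Spool the source to a temp file, as inputStreamToTempFile does.
        File in = File.createTempFile("demo", ".tmp");
        File out = null;
        try {
            Files.copy(source, in.toPath(), StandardCopyOption.REPLACE_EXISTING);
            out = File.createTempFile("demo-out", ".tmp");
            // Placeholder "transform"; the real filter shells out to ImageMagick.
            Files.copy(in.toPath(), out.toPath(), StandardCopyOption.REPLACE_EXISTING);
            // Read the result fully into memory BEFORE deleting the file;
            // returning a FileInputStream here would leak the temp file.
            byte[] bytes = Files.readAllBytes(out.toPath());
            return new ByteArrayInputStream(bytes);
        } finally {
            in.delete();
            if (out != null) {
                out.delete();
            }
        }
    }
}
```

The trade-off is that the whole result is held in memory, which is fine for thumbnails but would not suit arbitrarily large outputs.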
@@ -7,18 +7,40 @@
  */
 package org.dspace.app.mediafilter;
 
+import java.io.ByteArrayInputStream;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.InputStream;
+import java.nio.file.Files;
 
 public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
     public InputStream getDestinationStream(InputStream source)
             throws Exception
     {
         File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
-        File f2 = getImageFile(f, 0);
-        File f3 = getThumbnailFile(f2);
-        return new FileInputStream(f3);
+        File f2 = null;
+        File f3 = null;
+        try
+        {
+            f2 = getImageFile(f, 0);
+            f3 = getThumbnailFile(f2);
+            byte[] bytes = Files.readAllBytes(f3.toPath());
+            return new ByteArrayInputStream(bytes);
+        }
+        finally
+        {
+            //noinspection ResultOfMethodCallIgnored
+            f.delete();
+            if (f2 != null)
+            {
+                //noinspection ResultOfMethodCallIgnored
+                f2.delete();
+            }
+            if (f3 != null)
+            {
+                //noinspection ResultOfMethodCallIgnored
+                f3.delete();
+            }
+        }
     }
 
     public static final String[] PDF = {"Adobe PDF"};
@@ -38,6 +38,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
 {
     private static int width = 180;
     private static int height = 120;
+    private static boolean flatten = true;
     static String bitstreamDescription = "IM Thumbnail";
     static final String defaultPattern = "Generated Thumbnail";
     static Pattern replaceRegex = Pattern.compile(defaultPattern);
@@ -48,6 +49,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
         ProcessStarter.setGlobalSearchPath(s);
         width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
         height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
+        flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
         String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
         if (description != null) {
             bitstreamDescription = description;
@@ -132,6 +134,10 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
         IMOperation op = new IMOperation();
         String s = "[" + page + "]";
         op.addImage(f.getAbsolutePath()+s);
+        if (flatten)
+        {
+            op.flatten();
+        }
         op.addImage(f2.getAbsolutePath());
         if (MediaFilterManager.isVerbose) {
             System.out.println("IM Image Param: "+op);
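For context, `IMOperation` comes from the im4java wrapper around ImageMagick; `op.flatten()` emits the `-flatten` option, which merges layers against a background and avoids black or garbled thumbnails for PDFs and PSDs with transparency. A minimal sketch of how such an operation is typically assembled and run (file names hypothetical; requires ImageMagick installed plus im4java on the classpath):

```java
import org.im4java.core.ConvertCmd;
import org.im4java.core.IMOperation;

public class FlattenDemo {
    public static void main(String[] args) throws Exception {
        IMOperation op = new IMOperation();
        op.addImage("input.pdf[0]"); // page 0 only, like getImageFile(f, 0)
        op.flatten();                // adds "-flatten" for transparent sources
        op.thumbnail(180, 120);      // bounding box, cf. thumbnail.maxwidth/maxheight
        op.addImage("thumb.jpg");    // output image
        new ConvertCmd().run(op);    // shells out to ImageMagick's convert
    }
}
```

Making `flatten` configurable (the `pre + ".flatten"` property) lets sites whose sources rely on transparency opt out without a code change.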
|
@@ -33,7 +33,7 @@ import java.util.zip.GZIPOutputStream;
|
||||
* }
|
||||
* g.finish();
|
||||
* </pre>
|
||||
*
|
||||
*
|
||||
* @author Robert Tansley
|
||||
*/
|
||||
public abstract class AbstractGenerator
|
||||
@@ -59,7 +59,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Initialize this generator to write to the given directory. This must be
|
||||
* called by any subclass constructor.
|
||||
*
|
||||
*
|
||||
* @param outputDirIn
|
||||
* directory to write sitemap files to
|
||||
*/
|
||||
@@ -73,7 +73,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Start writing a new sitemap file.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs creating the file
|
||||
*/
|
||||
@@ -97,7 +97,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Add the given URL to the sitemap.
|
||||
*
|
||||
*
|
||||
* @param url
|
||||
* Full URL to add
|
||||
* @param lastMod
|
||||
@@ -129,7 +129,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Finish with the current sitemap file.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs writing
|
||||
*/
|
||||
@@ -144,15 +144,18 @@ public abstract class AbstractGenerator
|
||||
* Complete writing sitemap files and write the index files. This is invoked
|
||||
* when all calls to {@link AbstractGenerator#addURL(String, Date)} have
|
||||
* been completed, and invalidates the generator.
|
||||
*
|
||||
*
|
||||
* @return number of sitemap files written.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs writing
|
||||
*/
|
||||
public int finish() throws IOException
|
||||
{
|
||||
closeCurrentFile();
|
||||
if (null != currentOutput)
|
||||
{
|
||||
closeCurrentFile();
|
||||
}
|
||||
|
||||
OutputStream fo = new FileOutputStream(new File(outputDir,
|
||||
getIndexFilename()));
|
||||
@@ -165,13 +168,13 @@ public abstract class AbstractGenerator
|
||||
PrintStream out = new PrintStream(fo);
|
||||
writeIndex(out, fileCount);
|
||||
out.close();
|
||||
|
||||
|
||||
return fileCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return marked-up text to be included in a sitemap about a given URL.
|
||||
*
|
||||
*
|
||||
* @param url
|
||||
* URL to add information about
|
||||
* @param lastMod
|
||||
@@ -183,14 +186,14 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Return the boilerplate at the top of a sitemap file.
|
||||
*
|
||||
*
|
||||
* @return The boilerplate markup.
|
||||
*/
|
||||
public abstract String getLeadingBoilerPlate();
|
||||
|
||||
/**
|
||||
* Return the boilerplate at the end of a sitemap file.
|
||||
*
|
||||
*
|
||||
* @return The boilerplate markup.
|
||||
*/
|
||||
public abstract String getTrailingBoilerPlate();
|
||||
@@ -198,7 +201,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return the maximum size in bytes that an individual sitemap file should
|
||||
* be.
|
||||
*
|
||||
*
|
||||
* @return the size in bytes.
|
||||
*/
|
||||
public abstract int getMaxSize();
|
||||
@@ -206,7 +209,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return the maximum number of URLs that an individual sitemap file should
|
||||
* contain.
|
||||
*
|
||||
*
|
||||
* @return the maximum number of URLs.
|
||||
*/
|
||||
public abstract int getMaxURLs();
|
||||
@@ -214,7 +217,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return whether the written sitemap files and index should be
|
||||
* GZIP-compressed.
|
||||
*
|
||||
*
|
||||
* @return {@code true} if GZIP compression should be used, {@code false}
|
||||
* otherwise.
|
||||
*/
|
||||
@@ -222,7 +225,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Return the filename a sitemap at the given index should be stored at.
|
||||
*
|
||||
*
|
||||
* @param number
|
||||
* index of the sitemap file (zero is first).
|
||||
* @return the filename to write the sitemap to.
|
||||
@@ -231,14 +234,14 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Get the filename the index should be written to.
|
||||
*
|
||||
*
|
||||
* @return the filename of the index.
|
||||
*/
|
||||
public abstract String getIndexFilename();
|
||||
|
||||
/**
|
||||
* Write the index file.
|
||||
*
|
||||
*
|
||||
* @param output
|
||||
* stream to write the index to
|
||||
* @param sitemapCount
|
||||
|
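Most of this file's hunks are javadoc whitespace cleanup; the substantive change is in `finish()`, where `closeCurrentFile()` now runs only when `currentOutput` is non-null, so `finish()` no longer fails when no sitemap file was ever opened. The guard pattern in isolation (a sketch with a hypothetical writer field; not the DSpace class itself):

```java
import java.io.PrintStream;

public class GuardedCloseDemo {
    private PrintStream currentOutput; // null when no file is open

    void closeCurrentFile() {
        currentOutput.println("</urlset>"); // would NPE if nothing were open
        currentOutput.close();
        currentOutput = null;
    }

    int finish() {
        // Only close if a file is actually open, so finish() is safe
        // even when addURL() was never called.
        if (null != currentOutput) {
            closeCurrentFile();
        }
        return 0;
    }
}
```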
@@ -11,6 +11,7 @@ import java.sql.SQLException;
 
 import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
 
 import org.dspace.authorize.AuthorizeManager;
 import org.dspace.content.*;
@@ -22,20 +23,22 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 
 import org.apache.log4j.Logger;
 import org.dspace.core.ConfigurationManager;
 
 import java.io.File;
 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;
 
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.handle.HandleManager;
 
 import org.jdom.Element;
 
 /**
@@ -757,16 +760,17 @@ public class GoogleMetadata
     }
 
     /**
-     * Fetch all metadata mappings
-     *
+     * Fetch retaining the order of the values for any given key in which they
+     * where added (like authors).
+     *
      * Usage: GoogleMetadata gmd = new GoogleMetadata(item); for(Entry<String,
      * String> mapping : googlemd.getMappings()) { ... }
      *
      * @return Iterable of metadata fields mapped to Google-formatted values
     */
-    public Set<Entry<String, String>> getMappings()
+    public Collection<Entry<String, String>> getMappings()
     {
-        return new HashSet<>(metadataMappings.entries());
+        return metadataMappings.entries();
     }
 
     /**
@@ -1041,7 +1045,6 @@ public class GoogleMetadata
     */
     private Bitstream findLinkableFulltext(Item item) throws SQLException {
         Bitstream bestSoFar = null;
-        int bitstreamCount = 0;
         Bundle[] contentBundles = item.getBundles("ORIGINAL");
         for (Bundle bundle : contentBundles) {
             int primaryBitstreamId = bundle.getPrimaryBitstreamID();
@@ -1050,16 +1053,16 @@ public class GoogleMetadata
                 if (candidate.getID() == primaryBitstreamId) { // is primary -> use this one
                     if (isPublic(candidate)) {
                         return candidate;
                     }
-                } else if (bestSoFar == null) {
-                    bestSoFar = candidate;
-                }
-                bitstreamCount++;
+                } else
+                {
+                    if (bestSoFar == null && isPublic(candidate)) { //if bestSoFar is null but the candidate is not public you don't use it and try to find another
+                        bestSoFar = candidate;
+                    }
+                }
             }
         }
-        if (bitstreamCount > 1 || !isPublic(bestSoFar)) {
-            bestSoFar = null;
-        }
 
         return bestSoFar;
     }
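The `getMappings()` change is about iteration order: wrapping `metadataMappings.entries()` in a `HashSet` discarded the insertion order of repeated keys such as `citation_author`, which (per the new javadoc) matters for values like author lists. With Guava's `ArrayListMultimap`, `entries()` already preserves per-key value order, so no wrapping is needed. A sketch, assuming Guava on the classpath:

```java
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import java.util.Map.Entry;

public class MultimapOrderDemo {
    public static void main(String[] args) {
        ListMultimap<String, String> mappings = ArrayListMultimap.create();
        mappings.put("citation_author", "First, Author");
        mappings.put("citation_author", "Second, Author");
        mappings.put("citation_title", "Some Title");

        // entries() preserves the order values were added under each key;
        // new HashSet<>(mappings.entries()) would give no such guarantee.
        for (Entry<String, String> e : mappings.entries()) {
            System.out.println(e.getKey() + " = " + e.getValue());
        }
    }
}
```

Widening the return type from `Set` to `Collection` is what makes returning the live `entries()` view possible without copying.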
@@ -20,6 +20,7 @@ import org.dspace.eperson.Group;
 import org.dspace.storage.rdbms.DatabaseManager;
 import org.dspace.storage.rdbms.TableRow;
 import org.dspace.storage.rdbms.TableRowIterator;
+import org.dspace.workflow.WorkflowItem;
 
 /**
  * AuthorizeManager handles all authorization checks for DSpace. For better
@@ -295,8 +296,43 @@ public class AuthorizeManager
             }
         }
 
+        // In case the dso is an bundle or bitstream we must ignore custom
+        // policies if it does not belong to at least one installed item (see
+        // DS-2614).
+        // In case the dso is an item and a corresponding workspace or workflow
+        // item exist, we have to ignore custom policies (see DS-2614).
+        boolean ignoreCustomPolicies = false;
+        if (o instanceof Bitstream)
+        {
+            Bitstream b = (Bitstream) o;
+
+            // Ensure that this is not a collection or community logo
+            DSpaceObject parent = b.getParentObject();
+            if (!(parent instanceof Collection) && !(parent instanceof Community))
+            {
+                ignoreCustomPolicies = !isAnyItemInstalled(c, b.getBundles());
+            }
+        }
+        if (o instanceof Bundle)
+        {
+            ignoreCustomPolicies = !isAnyItemInstalled(c, new Bundle[] {(Bundle) o});
+        }
+        if (o instanceof Item)
+        {
+            if (WorkspaceItem.findByItem(c, (Item) o) != null ||
+                    WorkflowItem.findByItem(c, (Item) o) != null)
+            {
+                ignoreCustomPolicies = true;
+            }
+        }
+
         for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action))
         {
+            if (ignoreCustomPolicies
+                    && ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
+            {
+                continue;
+            }
             // check policies for date validity
             if (rp.isDateValid())
             {
@@ -318,7 +354,26 @@ public class AuthorizeManager
         // default authorization is denial
         return false;
     }
 
+    // check whether any bundle belongs to any item that passed submission
+    // and workflow process
+    protected static boolean isAnyItemInstalled(Context ctx, Bundle[] bundles)
+            throws SQLException
+    {
+        for (Bundle bundle : bundles)
+        {
+            for (Item item : bundle.getItems())
+            {
+                if (WorkspaceItem.findByItem(ctx, item) == null
+                        && WorkflowItem.findByItem(ctx, item) == null)
+                {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
     ///////////////////////////////////////////////
     // admin check methods
     ///////////////////////////////////////////////
@@ -837,7 +892,7 @@ public class AuthorizeManager
             throws SQLException
     {
         DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
-                + "resource_type_id= ? AND resource_id= ? AND rptype <> ? ",
+                + "resource_type_id= ? AND resource_id= ? AND (rptype <> ? OR rptype IS NULL)",
                 o.getType(), o.getID(), type);
     }
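The one-line SQL change at the end is a classic three-valued-logic fix: `rptype <> ?` never matches rows where `rptype` is NULL, because `NULL <> 'x'` evaluates to UNKNOWN rather than true, so such rows silently survived the DELETE. A runnable sketch of the effect (assumes the H2 in-memory database on the classpath, purely for demonstration; table shape simplified):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;

public class NullPredicateDemo {
    public static void main(String[] args) throws Exception {
        try (Connection c = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement s = c.createStatement()) {
            s.execute("CREATE TABLE resourcepolicy (id INT, rptype VARCHAR(30))");
            s.execute("INSERT INTO resourcepolicy VALUES "
                    + "(1, 'TYPE_CUSTOM'), (2, 'TYPE_INHERITED'), (3, NULL)");

            // Without "OR rptype IS NULL", row 3 would survive:
            // NULL <> 'TYPE_CUSTOM' is UNKNOWN, not true.
            try (PreparedStatement p = c.prepareStatement(
                    "DELETE FROM resourcepolicy WHERE rptype <> ? OR rptype IS NULL")) {
                p.setString(1, "TYPE_CUSTOM");
                System.out.println(p.executeUpdate() + " rows deleted"); // 2
            }
        }
    }
}
```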
@@ -354,7 +354,7 @@ public class BrowserScope
     */
     public void setResultsPerPage(int resultsPerPage)
     {
-        if (resultsPerPage > -1 || browseIndex.isTagCloudEnabled())
+        if (resultsPerPage > -1 || (browseIndex != null && browseIndex.isTagCloudEnabled()))
         {
             this.resultsPerPage = resultsPerPage;
         }
@@ -32,6 +32,8 @@ import org.dspace.utils.DSpace;
 *
 * @author Andrea Bollini (CILEA)
 * @author Adán Román Ruiz at arvo.es (bugfix)
+* @author Panagiotis Koutsourakis (National Documentation Centre) (bugfix)
+* @author Kostas Stamatis (National Documentation Centre) (bugfix)
 *
 */
 public class SolrBrowseDAO implements BrowseDAO
@@ -336,6 +338,22 @@ public class SolrBrowseDAO implements BrowseDAO
         addStatusFilter(query);
         query.setMaxResults(0);
         query.addFilterQueries("search.resourcetype:" + Constants.ITEM);
+
+        // We need to take into account the fact that we may be in a subset of the items
+        if (authority != null)
+        {
+            query.addFilterQueries("{!field f="+facetField + "_authority_filter}"
+                    + authority);
+        }
+        else if (this.value != null && !valuePartial)
+        {
+            query.addFilterQueries("{!field f="+facetField + "_value_filter}" + this.value);
+        }
+        else if (valuePartial)
+        {
+            query.addFilterQueries("{!field f="+facetField + "_partial}" + this.value);
+        }
+
         if (isAscending)
         {
             query.setQuery("bi_"+column + "_sort" + ": [* TO \"" + value + "\"}");
@@ -343,6 +361,7 @@ public class SolrBrowseDAO implements BrowseDAO
         else
         {
             query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]");
+            query.addFilterQueries("-(bi_" + column + "_sort" + ":" + value + "*)");
         }
         boolean includeUnDiscoverable = itemsWithdrawn || !itemsDiscoverable;
         DiscoverResult resp = null;
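The `{!field f=...}` syntax added above is Solr's field query parser: everything after the closing brace is treated as a single verbatim term for the named field, so values containing spaces, commas, or other query syntax need no escaping. A SolrJ sketch (field name hypothetical):

```java
import org.apache.solr.client.solrj.SolrQuery;

public class FieldParserDemo {
    public static void main(String[] args) {
        SolrQuery query = new SolrQuery("*:*");
        // The whole value after "}" is one term for the given field;
        // with the default lucene parser, the comma and space would
        // need escaping or quoting.
        query.addFilterQuery("{!field f=dc_contributor_author_filter}Smith, John");
        System.out.println(query);
    }
}
```

That is why the browse code can splice raw `authority` and `value` strings into the filter without calling an escape utility first.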
@@ -28,6 +28,7 @@ import org.dspace.workflow.WorkflowItem;
 import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
 import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
 
+import java.io.Serializable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.sql.PreparedStatement;
@@ -294,31 +295,48 @@ public class Collection extends DSpaceObject
     * @return the collections in the system
     * @throws SQLException
     */
-    public static Collection[] findAll(Context context) throws SQLException {
+    public static Collection[] findAll(Context context) throws SQLException
+    {
         TableRowIterator tri = null;
-        try {
-            String query = "SELECT c.* FROM collection c " +
-                "LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
-            if(DatabaseManager.isOracle()){
-                query += " ORDER BY cast(m.text_value as varchar2(128))";
-            }else{
-                query += " ORDER BY m.text_value";
-            }
-            tri = DatabaseManager.query(context,
-                query,
-                Constants.COLLECTION,
-                MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID()
-            );
-        } catch (SQLException e) {
-            log.error("Find all Collections - ",e);
-            throw e;
-        }
-
-        List<Collection> collections = new ArrayList<Collection>();
+        List<Collection> collections = null;
+        List<Serializable> params = new ArrayList<Serializable>();
+        StringBuffer query = new StringBuffer(
+            "SELECT c.*" +
+            "FROM collection c " +
+            "LEFT JOIN metadatavalue m ON (" +
+                "m.resource_id = c.collection_id AND " +
+                "m.resource_type_id = ? AND " +
+                "m.metadata_field_id = ?" +
+            ")"
+        );
+
+        if (DatabaseManager.isOracle())
+        {
+            query.append(" ORDER BY cast(m.text_value as varchar2(128))");
+        }
+        else
+        {
+            query.append(" ORDER BY m.text_value");
+        }
+
+        params.add(Constants.COLLECTION);
+        params.add(
+            MetadataField.findByElement(
+                context,
+                MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
+                "title",
+                null
+            ).getFieldID()
+        );
+
+        try
+        {
+            tri = DatabaseManager.query(
+                context, query.toString(), params.toArray()
+            );
+
+            collections = new ArrayList<Collection>();
+
             while (tri.hasNext())
             {
                 TableRow row = tri.next();
@@ -337,6 +355,11 @@ public class Collection extends DSpaceObject
             }
         }
+        catch (SQLException e)
+        {
+            log.error("Find all Collections - ", e);
+            throw e;
+        }
         finally
         {
             // close the TableRowIterator to free up resources
@@ -363,31 +386,47 @@ public class Collection extends DSpaceObject
     public static Collection[] findAll(Context context, Integer limit, Integer offset) throws SQLException
     {
         TableRowIterator tri = null;
-        try{
-            String query = "SELECT c.* FROM collection c " +
-                "LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
-
-            if(DatabaseManager.isOracle()){
-                query += " ORDER BY cast(m.text_value as varchar2(128))";
-            }else{
-                query += " ORDER BY m.text_value";
-            }
-            query += " limit ? offset ?";
-            tri = DatabaseManager.query(context,
-                query,
-                Constants.COLLECTION,
-                MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID(),
-                limit,
-                offset
-            );
-        } catch (SQLException e) {
-            log.error("Find all Collections offset/limit - ",e);
-            throw e;
-        }
-        List<Collection> collections = new ArrayList<Collection>();
+        List<Collection> collections = null;
+        List<Serializable> params = new ArrayList<Serializable>();
+        StringBuffer query = new StringBuffer(
+            "SELECT c.*" +
+            "FROM collection c " +
+            "LEFT JOIN metadatavalue m ON (" +
+                "m.resource_id = c.collection_id AND " +
+                "m.resource_type_id = ? AND " +
+                "m.metadata_field_id = ?" +
+            ")"
+        );
+
+        if (DatabaseManager.isOracle())
+        {
+            query.append(" ORDER BY cast(m.text_value as varchar2(128))");
+        }
+        else
+        {
+            query.append(" ORDER BY m.text_value");
+        }
+
+        params.add(Constants.COLLECTION);
+        params.add(
+            MetadataField.findByElement(
+                context,
+                MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
+                "title",
+                null
+            ).getFieldID()
+        );
+
+        DatabaseManager.applyOffsetAndLimit(query, params, offset, limit);
+
+        try
+        {
+            tri = DatabaseManager.query(
+                context, query.toString(), params.toArray()
+            );
+
+            collections = new ArrayList<Collection>();
+
             while (tri.hasNext())
             {
                 TableRow row = tri.next();
@@ -406,6 +445,11 @@ public class Collection extends DSpaceObject
             }
         }
+        catch (SQLException e)
+        {
+            log.error("Find all Collections offset/limit - ", e);
+            throw e;
+        }
         finally
         {
             // close the TableRowIterator to free up resources
@@ -450,13 +494,20 @@ public class Collection extends DSpaceObject
     */
     public ItemIterator getItems(Integer limit, Integer offset) throws SQLException
     {
-        String myQuery = "SELECT item.* FROM item, collection2item WHERE "
-                + "item.item_id=collection2item.item_id AND "
-                + "collection2item.collection_id= ? "
-                + "AND item.in_archive='1' limit ? offset ?";
+        List<Serializable> params = new ArrayList<Serializable>();
+        StringBuffer myQuery = new StringBuffer(
+            "SELECT item.* " +
+            "FROM item, collection2item " +
+            "WHERE item.item_id = collection2item.item_id " +
+            "AND collection2item.collection_id = ? " +
+            "AND item.in_archive = '1'"
+        );
+
+        params.add(getID());
+        DatabaseManager.applyOffsetAndLimit(myQuery, params, offset, limit);
 
-        TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item",
-                myQuery,getID(), limit, offset);
+        TableRowIterator rows = DatabaseManager.query(ourContext,
+                myQuery.toString(), params.toArray());
 
         return new ItemIterator(ourContext, rows);
     }
@@ -1513,7 +1564,7 @@ public class Collection extends DSpaceObject
 
     public static Collection[] findAuthorizedOptimized(Context context, int actionID) throws java.sql.SQLException
     {
-        if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", true)) {
+        if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false)) {
            // Fallback to legacy query if config says so. The rationale could be that a site found a bug.
            return findAuthorized(context, null, actionID);
        }
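The rewrite replaces string-concatenated `limit ? offset ?` (PostgreSQL-flavoured syntax that breaks on Oracle) with a bind-parameter list plus `DatabaseManager.applyOffsetAndLimit`, DSpace's helper that appends the database-appropriate pagination clause and its parameters. The general shape of the pattern, independent of DSpace (class and column names hypothetical):

```java
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

public class PagedQueryBuilderDemo {
    public static void main(String[] args) {
        List<Serializable> params = new ArrayList<>();
        StringBuffer query = new StringBuffer(
                "SELECT c.* FROM collection c WHERE c.owner = ?");
        params.add(42);

        // Keep SQL text and bind values in lock-step: every appended clause
        // brings its parameters along, so values are never inlined as text.
        boolean oracle = false; // would come from DatabaseManager.isOracle()
        if (oracle) {
            query.append(" ORDER BY cast(c.name as varchar2(128))");
        } else {
            query.append(" ORDER BY c.name");
        }

        // Stand-in for DatabaseManager.applyOffsetAndLimit(query, params, offset, limit)
        query.append(" LIMIT ? OFFSET ?");
        params.add(10); // limit
        params.add(20); // offset

        System.out.println(query + "  params=" + params);
    }
}
```

Centralising the pagination clause in one helper is what lets the same caller code run against both PostgreSQL and Oracle.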
@@ -263,7 +263,7 @@ public class Item extends DSpaceObject
     }
 
     String query = "SELECT item.* FROM metadatavalue,item WHERE item.in_archive='1' " +
-            "AND item.item_id = metadatavalue.item_id AND metadata_field_id = ?";
+            "AND item.item_id = metadatavalue.resource_id AND metadatavalue.resource_type_id=2 AND metadata_field_id = ?";
     TableRowIterator rows = null;
     if (Item.ANY.equals(authority)) {
         rows = DatabaseManager.queryTable(context, "item", query, mdf.getFieldID());
@@ -21,15 +21,7 @@ import java.util.zip.ZipFile;
 
 import org.apache.log4j.Logger;
 import org.dspace.authorize.AuthorizeException;
-import org.dspace.content.Bitstream;
-import org.dspace.content.BitstreamFormat;
-import org.dspace.content.Bundle;
-import org.dspace.content.Collection;
-import org.dspace.content.Community;
-import org.dspace.content.DSpaceObject;
-import org.dspace.content.FormatIdentifier;
-import org.dspace.content.Item;
-import org.dspace.content.WorkspaceItem;
+import org.dspace.content.*;
 import org.dspace.content.crosswalk.CrosswalkException;
 import org.dspace.content.crosswalk.MetadataValidationException;
 import org.dspace.core.ConfigurationManager;
@@ -37,6 +29,8 @@ import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.core.LogManager;
 import org.dspace.handle.HandleManager;
+import org.dspace.workflow.WorkflowItem;
+import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
 import org.jdom.Element;
 
 /**
@@ -660,8 +654,24 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
         addBitstreams(context, item, manifest, pkgFile, params, callback);
 
         // have subclass manage license since it may be extra package file.
-        addLicense(context, item, license, (Collection) dso
-                .getParentObject(), params);
+        Collection owningCollection = (Collection) dso.getParentObject();
+        if(owningCollection == null)
+        {
+            //We are probably dealing with an item that isn't archived yet
+            InProgressSubmission inProgressSubmission = WorkspaceItem.findByItem(context, item);
+            if(inProgressSubmission == null)
+            {
+                if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow"))
+                {
+                    inProgressSubmission = XmlWorkflowItem.findByItem(context, item);
+                }else{
+                    inProgressSubmission = WorkflowItem.findByItem(context, item);
+                }
+            }
+            owningCollection = inProgressSubmission.getCollection();
+        }
+
+        addLicense(context, item, license, owningCollection, params);
 
         // FIXME ?
         // should set lastModifiedTime e.g. when ingesting AIP.
@@ -474,8 +474,18 @@ public class Email
         System.out.println(" - To: " + to);
         System.out.println(" - Subject: " + subject);
         System.out.println(" - Server: " + server);
+        boolean disabled = ConfigurationManager.getBooleanProperty("mail.server.disabled", false);
         try
         {
+            if( disabled)
+            {
+                System.err.println("\nError sending email:");
+                System.err.println(" - Error: cannot test email because mail.server.disabled is set to true");
+                System.err.println("\nPlease see the DSpace documentation for assistance.\n");
+                System.err.println("\n");
+                System.exit(1);
+                return;
+            }
             e.send();
         }
         catch (MessagingException me)
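The added guard gives test and development servers a hard off-switch: when `mail.server.disabled` is true, the test-email command fails fast with an explanatory message instead of attempting an SMTP connection. The shape of a config-gated short-circuit, reduced to a sketch (the `Properties` object stands in for DSpace's `ConfigurationManager`, which is an assumption of this demo):

```java
import java.util.Properties;

public class MailDisabledDemo {
    public static void main(String[] args) {
        // Stand-in for ConfigurationManager.getBooleanProperty("mail.server.disabled", false)
        Properties config = new Properties();
        config.setProperty("mail.server.disabled", "true");
        boolean disabled = Boolean.parseBoolean(
                config.getProperty("mail.server.disabled", "false"));

        if (disabled) {
            System.err.println("Error: cannot test email because mail.server.disabled is set to true");
            return; // the real command calls System.exit(1) here
        }
        // ... e.send() would run here ...
    }
}
```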
@@ -16,6 +16,9 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 
+import org.dspace.core.service.NewsService;
+import org.dspace.utils.DSpace;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,6 +42,10 @@ public class NewsManager
     */
     public static String readNewsFile(String newsFile)
     {
+        NewsService newsService = new DSpace().getSingletonService(NewsService.class);
+        if (!newsService.validate(newsFile)) {
+            throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
+        }
         String fileName = getNewsFilePath();
 
         fileName += newsFile;
@@ -81,6 +88,10 @@ public class NewsManager
     */
     public static String writeNewsFile(String newsFile, String news)
     {
+        NewsService newsService = new DSpace().getSingletonService(NewsService.class);
+        if (!newsService.validate(newsFile)) {
+            throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
+        }
         String fileName = getNewsFilePath();
 
         fileName += newsFile;
@@ -0,0 +1,29 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.core;
+
+import java.util.List;
+
+import org.dspace.core.service.NewsService;
+
+public class NewsServiceImpl implements NewsService {
+    private List<String> acceptableFilenames;
+
+    public void setAcceptableFilenames(List<String> acceptableFilenames) {
+        this.acceptableFilenames = acceptableFilenames;
+    }
+
+    @Override
+    public boolean validate(String newsName) {
+        if (acceptableFilenames != null) {
+            return acceptableFilenames.contains(newsName);
+        }
+        return false;
+    }
+
+}
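Taken together with the `NewsManager` changes above, this new service acts as a filename whitelist: `readNewsFile`/`writeNewsFile` now refuse any name that is not explicitly configured, which blocks path tricks like `../../dspace.cfg` before a file path is ever built. A usage sketch (the accepted names match DSpace's stock JSPUI news files; the direct wiring here is hypothetical, since the real instance is configured and injected via the service manager):

```java
import java.util.Arrays;

import org.dspace.core.NewsServiceImpl;

public class NewsValidationDemo {
    public static void main(String[] args) {
        NewsServiceImpl newsService = new NewsServiceImpl();
        newsService.setAcceptableFilenames(
                Arrays.asList("news-top.html", "news-side.html"));

        System.out.println(newsService.validate("news-top.html"));    // true
        System.out.println(newsService.validate("../../dspace.cfg")); // false
        // In NewsManager, a false result now throws
        // IllegalArgumentException before any path is constructed.
    }
}
```

Note the default is closed: with no configured list, `validate` returns false for everything.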
@@ -0,0 +1,12 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.core.service;
+
+public interface NewsService {
+    boolean validate(String newsName);
+}
@@ -218,10 +218,20 @@ public class DiscoverQuery {
         this.facetOffset = facetOffset;
     }
 
+    /**
+     * Sets the fields which you want Discovery to return in the search results.
+     * It is HIGHLY recommended to limit the fields returned, as by default
+     * some backends (like Solr) will return everything.
+     * @param field field to add to the list of fields returned
+     */
     public void addSearchField(String field){
         this.searchFields.add(field);
     }
 
+    /**
+     * Get list of fields which Discovery will return in the search results
+     * @return List of field names
+     */
     public List<String> getSearchFields() {
         return searchFields;
     }
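A short usage sketch of the new accessor pair; the point of `addSearchField` is that `SolrServiceImpl` (further down in this comparison) translates each entry into `solrQuery.addField(...)`, so Solr returns only these fields plus a few identifier fields instead of entire documents. Assumes dspace-api on the classpath; the field names are illustrative:

```java
import org.dspace.discovery.DiscoverQuery;

public class SearchFieldsDemo {
    public static void main(String[] args) {
        DiscoverQuery query = new DiscoverQuery();
        query.setQuery("dark matter");
        // Ask the backend for just what a listing page needs.
        query.addSearchField("dc.title");
        query.addSearchField("dc.date.issued");
        System.out.println(query.getSearchFields()); // [dc.title, dc.date.issued]
    }
}
```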
@@ -113,4 +113,11 @@ public interface SearchService {
     * @return the indexed field
     */
    String toSortFieldIndex(String metadataField, String type);
 
+    /**
+     * Utility method to escape any special characters in a user's query
+     * @param query
+     * @return query with any special characters escaped
+     */
+    String escapeQueryChars(String query);
 }
@@ -119,6 +119,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
     private static final Logger log = Logger.getLogger(SolrServiceImpl.class);
 
     protected static final String LAST_INDEXED_FIELD = "SolrIndexer.lastIndexed";
+    protected static final String HANDLE_FIELD = "handle";
+    protected static final String RESOURCE_TYPE_FIELD = "search.resourcetype";
+    protected static final String RESOURCE_ID_FIELD = "search.resourceid";
 
     public static final String FILTER_SEPARATOR = "\n|||\n";
 
@@ -149,9 +152,11 @@ public class SolrServiceImpl implements SearchService, IndexingService {
             solr.setBaseURL(solrService);
             solr.setUseMultiPartPost(true);
             // Dummy/test query to search for Item (type=2) of ID=1
             SolrQuery solrQuery = new SolrQuery()
-                    .setQuery("search.resourcetype:2 AND search.resourceid:1");
+                    .setQuery(RESOURCE_TYPE_FIELD + ":2 AND " + RESOURCE_ID_FIELD + ":1");
+            // Only return obj identifier fields in result doc
+            solrQuery.setFields(RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
             solr.query(solrQuery);
 
             // As long as Solr initialized, check with DatabaseUtils to see
@@ -323,7 +328,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         try {
             if(getSolr() != null){
-                getSolr().deleteByQuery("handle:\"" + handle + "\"");
+                getSolr().deleteByQuery(HANDLE_FIELD + ":\"" + handle + "\"");
                 if(commit)
                 {
                     getSolr().commit();
@@ -462,10 +467,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         }
         if (force)
         {
-            getSolr().deleteByQuery("search.resourcetype:[2 TO 4]");
+            getSolr().deleteByQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
         } else {
             SolrQuery query = new SolrQuery();
-            query.setQuery("search.resourcetype:[2 TO 4]");
+            // Query for all indexed Items, Collections and Communities,
+            // returning just their handle
+            query.setFields(HANDLE_FIELD);
+            query.setQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
             QueryResponse rsp = getSolr().query(query);
             SolrDocumentList docs = rsp.getResults();
 
@@ -475,7 +483,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
 
             SolrDocument doc = (SolrDocument) iter.next();
 
-            String handle = (String) doc.getFieldValue("handle");
+            String handle = (String) doc.getFieldValue(HANDLE_FIELD);
 
             DSpaceObject o = HandleManager.resolveToObject(context, handle);
 
@@ -616,7 +624,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         boolean inIndex = false;
 
         SolrQuery query = new SolrQuery();
-        query.setQuery("handle:" + handle);
+        query.setQuery(HANDLE_FIELD + ":" + handle);
+        // Specify that we ONLY want the LAST_INDEXED_FIELD returned in the field list (fl)
+        query.setFields(LAST_INDEXED_FIELD);
         QueryResponse rsp;
 
         try {
@@ -1444,9 +1454,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         // New fields to weaken the dependence on handles, and allow for faster
         // list display
         doc.addField("search.uniqueid", type+"-"+id);
-        doc.addField("search.resourcetype", Integer.toString(type));
-        doc.addField("search.resourceid", Integer.toString(id));
+        doc.addField(RESOURCE_TYPE_FIELD, Integer.toString(type));
+        doc.addField(RESOURCE_ID_FIELD, Integer.toString(id));
 
         // want to be able to search for handle, so use keyword
         // (not tokenized, but it is indexed)
@@ -1454,7 +1464,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         {
             // want to be able to search for handle, so use keyword
             // (not tokenized, but it is indexed)
-            doc.addField("handle", handle);
+            doc.addField(HANDLE_FIELD, handle);
         }
 
         if (locations != null)
@@ -1584,7 +1594,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
                 discoveryQuery.addFilterQueries("location:l" + dso.getID());
             } else if (dso instanceof Item)
             {
-                discoveryQuery.addFilterQueries("handle:" + dso.getHandle());
+                discoveryQuery.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
             }
         }
         return search(context, discoveryQuery, includeUnDiscoverable);
@@ -1620,6 +1630,18 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         }
 
         solrQuery.setQuery(query);
+
+        // Add any search fields to our query. This is the limited list
+        // of fields that will be returned in the solr result
+        for(String fieldName : discoveryQuery.getSearchFields())
+        {
+            solrQuery.addField(fieldName);
+        }
+        // Also ensure a few key obj identifier fields are returned with every query
+        solrQuery.addField(HANDLE_FIELD);
+        solrQuery.addField(RESOURCE_TYPE_FIELD);
+        solrQuery.addField(RESOURCE_ID_FIELD);
 
         if(discoveryQuery.isSpellCheck())
         {
             solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -1640,7 +1662,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         }
         if(discoveryQuery.getDSpaceObjectFilter() != -1)
         {
-            solrQuery.addFilterQuery("search.resourcetype:" + discoveryQuery.getDSpaceObjectFilter());
+            solrQuery.addFilterQuery(RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter());
         }
 
         for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++)
@@ -1753,7 +1775,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
                 query.addFilterQueries("location:l" + dso.getID());
             } else if (dso instanceof Item)
             {
-                query.addFilterQueries("handle:" + dso.getHandle());
+                query.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
             }
         }
         return searchJSON(context, query, jsonIdentifier);
@@ -1807,7 +1829,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         {
             result.addDSpaceObject(dso);
         } else {
-            log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue("handle")));
+            log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue(HANDLE_FIELD)));
             continue;
         }
 
@@ -1926,9 +1948,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
 
     protected static DSpaceObject findDSpaceObject(Context context, SolrDocument doc) throws SQLException {
 
-        Integer type = (Integer) doc.getFirstValue("search.resourcetype");
-        Integer id = (Integer) doc.getFirstValue("search.resourceid");
-        String handle = (String) doc.getFirstValue("handle");
+        Integer type = (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD);
+        Integer id = (Integer) doc.getFirstValue(RESOURCE_ID_FIELD);
+        String handle = (String) doc.getFirstValue(HANDLE_FIELD);
 
         if (type != null && id != null)
         {
@@ -1981,7 +2003,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
 
         SolrQuery solrQuery = new SolrQuery();
         solrQuery.setQuery(query);
-        solrQuery.setFields("search.resourceid", "search.resourcetype");
+        //Only return obj identifier fields in result doc
+        solrQuery.setFields(RESOURCE_ID_FIELD, RESOURCE_TYPE_FIELD);
         solrQuery.setStart(offset);
         solrQuery.setRows(max);
         if (orderfield != null)
@@ -2001,7 +2024,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         {
             SolrDocument doc = (SolrDocument) iter.next();
 
-            DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue("search.resourcetype"), (Integer) doc.getFirstValue("search.resourceid"));
+            DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD), (Integer) doc.getFirstValue(RESOURCE_ID_FIELD));
 
             if (o != null)
             {
@@ -2089,7 +2112,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         try{
             SolrQuery solrQuery = new SolrQuery();
             //Set the query to handle since this is unique
-            solrQuery.setQuery("handle: " + item.getHandle());
+            solrQuery.setQuery(HANDLE_FIELD + ": " + item.getHandle());
+            //Only return obj identifier fields in result doc
+            solrQuery.setFields(HANDLE_FIELD, RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
             //Add the more like this parameters !
             solrQuery.setParam(MoreLikeThisParams.MLT, true);
             //Add a comma separated list of the similar fields
@@ -2320,4 +2345,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
             throw new SearchServiceException(e.getMessage(), e);
         }
     }
+
+    @Override
+    public String escapeQueryChars(String query) {
+        // Use Solr's built in query escape tool
+        // WARNING: You should only escape characters from user entered queries,
+        // otherwise you may accidentally BREAK field-based queries (which often
+        // rely on special characters to separate the field from the query value)
+        return ClientUtils.escapeQueryChars(query);
+    }
 }
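The new `escapeQueryChars` delegates to SolrJ's `ClientUtils.escapeQueryChars`, which backslash-escapes Lucene query metacharacters (`+ - ! ( ) : ^ [ ] " { } ~ * ? | & ;` and whitespace, among others). As the in-code warning says, escape only the user-entered value, never the assembled query. A sketch, assuming SolrJ on the classpath:

```java
import org.apache.solr.client.solrj.util.ClientUtils;

public class EscapeDemo {
    public static void main(String[] args) {
        String userInput = "AC/DC (1977)";

        // Right: escape just the value, then splice it into the field query.
        String safe = "title:" + ClientUtils.escapeQueryChars(userInput);
        System.out.println(safe); // e.g. title:AC\/DC\ \(1977\)

        // Wrong: escaping the whole query also escapes the ':' that
        // separates field from value, breaking the field query.
        System.out.println(ClientUtils.escapeQueryChars("title:" + userInput));
    }
}
```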
@@ -284,14 +284,23 @@ public class EventManager
         {
             Context ctx = new Context();
 
-            for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
-                    .iterator(); ci.hasNext();)
-            {
-                ConsumerProfile cp = (ConsumerProfile) ci.next();
-                if (cp != null)
-                {
-                    cp.getConsumer().finish(ctx);
-                }
-            }
-            ctx.complete();
+            try {
+
+                for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
+                        .iterator(); ci.hasNext();)
+                {
+                    ConsumerProfile cp = (ConsumerProfile) ci.next();
+                    if (cp != null)
+                    {
+                        cp.getConsumer().finish(ctx);
+                    }
+                }
+
+                ctx.complete();
+
+            } catch (Exception e) {
+                ctx.abort();
+                throw e;
+            }
             return;
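The restructuring above applies the standard DSpace `Context` lifecycle discipline: do the work, `complete()` on success, `abort()` on any failure so the transaction is rolled back and the context is not left dangling. In outline, using a JDBC connection as a hypothetical stand-in for `org.dspace.core.Context` (assumes the H2 driver for the demo URL):

```java
import java.sql.Connection;
import java.sql.DriverManager;

public class CompleteOrAbortDemo {
    public static void main(String[] args) throws Exception {
        Connection ctx = DriverManager.getConnection("jdbc:h2:mem:demo");
        ctx.setAutoCommit(false);
        try {
            // ... call each consumer's finish(ctx) here ...
            ctx.commit();   // like Context.complete(): commit and release
        } catch (Exception e) {
            ctx.rollback(); // like Context.abort(): discard the work
            throw e;        // rethrow so the caller still sees the failure
        } finally {
            ctx.close();
        }
    }
}
```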
@@ -9,89 +9,167 @@ package org.dspace.handle;

 import java.io.BufferedReader;
 import java.io.InputStreamReader;

+import java.sql.SQLException;
+import org.apache.log4j.Logger;
 import org.dspace.core.Context;
 import org.dspace.storage.rdbms.DatabaseManager;
 import org.dspace.storage.rdbms.TableRow;
-import org.dspace.search.DSIndexer;
-import org.dspace.browse.IndexBrowse;
+import org.dspace.discovery.IndexClient;

 /**
  * A script to update the handle values in the database. This is typically used
- * when moving from a test machine (handle = 123456789) to a production service.
+ * when moving from a test machine (handle = 123456789) to a production service
+ * or when making a test clone of a production service.
  *
  * @author Stuart Lewis
+ * @author Ivo Prajer (Czech Technical University in Prague)
  */
 public class UpdateHandlePrefix
 {
+    private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);
+
     /**
      * When invoked as a command-line tool, updates handle prefix
      *
      * @param args the command-line arguments, none used
      * @throws java.lang.Exception
      */
     public static void main(String[] args) throws Exception
     {
-        // There should be two paramters
+        // There should be two parameters
         if (args.length < 2)
         {
             System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n");
             System.exit(1);
         }
         else
         {
-            // Confirm with the user that this is what they want to do
             String oldH = args[0];
             String newH = args[1];

-            BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
+            // Get info about changes
+            System.out.println("\nGetting information about handles from database...");
             Context context = new Context();
-            System.out.println("If you continue, all handles in your repository with prefix " +
-                oldH + " will be updated to have handle prefix " + newH + "\n");
-            String sql = "SELECT count(*) as count FROM handle " +
+            String sql = "SELECT count(*) as count " +
+                         "FROM handle " +
                          "WHERE handle LIKE '" + oldH + "%'";
             TableRow row = DatabaseManager.querySingle(context, sql, new Object[] {});
             long count = row.getLongColumn("count");
-            System.out.println(count + " items will be updated.\n");
-            System.out.print("Have you taken a backup, and are you ready to continue? [y/n]: ");
-            String choiceString = input.readLine();

-            if (choiceString.equalsIgnoreCase("y"))
+            if (count > 0)
             {
-                // Make the changes
-                System.out.print("Updating handle table... ");
-                sql = "update handle set handle = '" + newH + "' || '/' || handle_id " +
-                      "where handle like '" + oldH + "/%'";
-                int updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
-                System.out.println(updated + " items updated");
+                // Print info text about changes
+                System.out.println(
+                  "In your repository will be updated " + count + " handle" +
+                  ((count > 1) ? "s" : "") + " to new prefix " + newH +
+                  " from original " + oldH + "!\n"
+                );

-                System.out.print("Updating metadatavalues table... ");
-                sql = "UPDATE metadatavalue SET text_value= (SELECT 'http://hdl.handle.net/' || " +
-                      "handle FROM handle WHERE handle.resource_id=item_id AND " +
-                      "handle.resource_type_id=2) WHERE text_value LIKE 'http://hdl.handle.net/%';";
-                updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
-                System.out.println(updated + " metadata values updated");
+                // Confirm with the user that this is what they want to do
+                System.out.print(
+                  "Servlet container (e.g. Apache Tomcat, Jetty, Caucho Resin) must be running.\n" +
+                  "If it is necessary, please make a backup of the database.\n" +
+                  "Are you ready to continue? [y/n]: "
+                );
+                BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
+                String choiceString = input.readLine();

-                // Commit the changes
-                context.complete();
-
-                System.out.print("Re-creating browse and search indexes... ");
-
-                // Reinitialise the browse system
-                IndexBrowse.main(new String[] {"-i"});
-
-                // Reinitialise the browse system
-                try
+                if (choiceString.equalsIgnoreCase("y"))
                 {
-                    DSIndexer.main(new String[0]);
-                }
-                catch (Exception e)
-                {
-                    // Not a lot we can do
-                    System.out.println("Error re-indexing:");
-                    e.printStackTrace();
-                    System.out.println("\nPlease manually run [dspace]/bin/index-all");
-                }
-
-                // All done
-                System.out.println("\nHandles successfully updated.");
+                    try {
+                        log.info("Updating handle prefix from " + oldH + " to " + newH);
+
+                        // Make the changes
+                        System.out.print("\nUpdating handle table... ");
+                        sql = "UPDATE handle " +
+                              "SET handle = '" + newH + "' || '/' || handle_id " +
+                              "WHERE handle like '" + oldH + "/%'";
+                        int updHdl = DatabaseManager.updateQuery(context, sql, new Object[] {});
+                        System.out.println(
+                          updHdl + " item" + ((updHdl > 1) ? "s" : "") + " updated"
+                        );
+
+                        System.out.print("Updating metadatavalues table... ");
+                        sql = "UPDATE metadatavalue " +
+                              "SET text_value = " +
+                                "(" +
+                                  "SELECT 'http://hdl.handle.net/' || handle " +
+                                  "FROM handle " +
+                                  "WHERE handle.resource_id = metadatavalue.resource_id " +
+                                    "AND handle.resource_type_id = 2" +
+                                ") " +
+                              "WHERE text_value LIKE 'http://hdl.handle.net/" + oldH + "/%'" +
+                                "AND EXISTS " +
+                                  "(" +
+                                    "SELECT 1 " +
+                                    "FROM handle " +
+                                    "WHERE handle.resource_id = metadatavalue.resource_id " +
+                                      "AND handle.resource_type_id = 2" +
+                                  ")";
+                        int updMeta = DatabaseManager.updateQuery(context, sql, new Object[] {});
+                        System.out.println(
+                          updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
+                        );
+
+                        // Commit the changes
+                        context.complete();
+
+                        log.info(
+                          "Done with updating handle prefix. " +
+                          "It was changed " + updHdl + " handle" + ((updHdl > 1) ? "s" : "") +
+                          " and " + updMeta + " metadata record" + ((updMeta > 1) ? "s" : "")
+                        );
+                    }
+                    catch (SQLException sqle)
+                    {
+                        if ((context != null) && (context.isValid()))
+                        {
+                            context.abort();
+                            context = null;
+                        }
+                        System.out.println("\nError during SQL operations.");
+                        throw sqle;
+                    }
+
+                    System.out.println("Handles successfully updated in database.\n");
+                    System.out.println("Re-creating browse and search indexes...");
+
+                    try
+                    {
+                        // Reinitialise the search and browse system
+                        IndexClient.main(new String[] {"-b"});
+                        System.out.println("Browse and search indexes are ready now.");
+                        // All done
+                        System.out.println("\nAll done successfully. Please check the DSpace logs!\n");
+                    }
+                    catch (Exception e)
+                    {
+                        // Not a lot we can do
+                        System.out.println("Error during re-indexing.");
+                        System.out.println(
+                          "\n\nAutomatic re-indexing failed. Please perform it manually.\n" +
+                          "You should run one of the following commands:\n\n" +
+                          "  [dspace]/bin/dspace index-discovery -b\n\n" +
+                          "If you are using Solr for browse (this is the default setting).\n" +
+                          "When launching this command, your servlet container must be running.\n\n" +
+                          "  [dspace]/bin/dspace index-lucene-init\n\n" +
+                          "If you enabled Lucene for search.\n" +
+                          "When launching this command, your servlet container must be shutdown.\n"
+                        );
+                        throw e;
+                    }
+                }
+                else
+                {
+                    System.out.println("No changes have been made to your data.\n");
+                }
             }
             else
             {
-                System.out.println("No changes have been made to your data.");
+                System.out.println("Nothing to do! All handles are up-to-date.\n");
             }
         }
     }
 }
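To make the effect of the two UPDATE statements concrete, here is a minimal sketch of the transformation they perform on a single row (the prefix values and handle_id are illustrative only):

    // Illustrative values: a handle row with handle_id 42 under the test prefix.
    String oldH = "123456789", newH = "10673";
    // handle table:    "123456789/42"                       -> "10673/42"
    // metadatavalue:   "http://hdl.handle.net/123456789/42" -> "http://hdl.handle.net/10673/42"
    String newHandle = newH + "/" + 42;                   // mirrors handle = newH || '/' || handle_id
    String newUri = "http://hdl.handle.net/" + newHandle; // mirrors the metadatavalue rewrite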
@@ -10,7 +10,6 @@ package org.dspace.identifier;

 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.List;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Metadatum;
@@ -68,12 +67,12 @@ public class DOIIdentifierProvider
     public static final String DOI_QUALIFIER = "uri";

     public static final Integer TO_BE_REGISTERED = 1;
-    public static final Integer TO_BE_RESERVERED = 2;
+    public static final Integer TO_BE_RESERVED = 2;
     public static final Integer IS_REGISTERED = 3;
     public static final Integer IS_RESERVED = 4;
-    public static final Integer UPDATE_RESERVERED = 5;
+    public static final Integer UPDATE_RESERVED = 5;
     public static final Integer UPDATE_REGISTERED = 6;
-    public static final Integer UPDATE_BEFORE_REGISTERATION = 7;
+    public static final Integer UPDATE_BEFORE_REGISTRATION = 7;
     public static final Integer TO_BE_DELETED = 8;
     public static final Integer DELETED = 9;

@@ -251,7 +250,7 @@ public class DOIIdentifierProvider
            return;
        }

-       doiRow.setColumn("status", TO_BE_RESERVERED);
+       doiRow.setColumn("status", TO_BE_RESERVED);
        try
        {
            DatabaseManager.update(context, doiRow);
@@ -353,11 +352,11 @@ public class DOIIdentifierProvider
        }
        else if (TO_BE_REGISTERED == doiRow.getIntColumn("status"))
        {
-           doiRow.setColumn("status", UPDATE_BEFORE_REGISTERATION);
+           doiRow.setColumn("status", UPDATE_BEFORE_REGISTRATION);
        }
        else if (IS_RESERVED == doiRow.getIntColumn("status"))
        {
-           doiRow.setColumn("status", UPDATE_RESERVERED);
+           doiRow.setColumn("status", UPDATE_RESERVED);
        }
        else
        {
@@ -416,11 +415,11 @@ public class DOIIdentifierProvider
        {
            doiRow.setColumn("status", IS_REGISTERED);
        }
-       else if (UPDATE_BEFORE_REGISTERATION == doiRow.getIntColumn("status"))
+       else if (UPDATE_BEFORE_REGISTRATION == doiRow.getIntColumn("status"))
        {
            doiRow.setColumn("status", TO_BE_REGISTERED);
        }
-       else if (UPDATE_RESERVERED == doiRow.getIntColumn("status"))
+       else if (UPDATE_RESERVED == doiRow.getIntColumn("status"))
        {
            doiRow.setColumn("status", IS_RESERVED);
        }
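Taken together, the renamed constants describe a small DOI status lifecycle. A hedged summary of just the transitions visible in the hunks above (other transitions exist elsewhere in the class):

    // reservation path:                status -> TO_BE_RESERVED
    // update(), was TO_BE_REGISTERED:  status -> UPDATE_BEFORE_REGISTRATION
    // update(), was IS_RESERVED:       status -> UPDATE_RESERVED
    // after the update completes:      UPDATE_BEFORE_REGISTRATION -> TO_BE_REGISTERED
    //                                  UPDATE_RESERVED            -> IS_RESERVED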
@@ -561,7 +561,7 @@ public class EZIDIdentifierProvider
    /**
     * Map selected DSpace metadata to fields recognized by DataCite.
     */
-   private Map<String, String> crosswalkMetadata(DSpaceObject dso)
+   Map<String, String> crosswalkMetadata(DSpaceObject dso)
    {
        if ((null == dso) || !(dso instanceof Item))
        {
@@ -632,18 +632,42 @@ public class EZIDIdentifierProvider
            mapped.put(DATACITE_PUBLICATION_YEAR, year);
        }

-       // TODO find a way to get a current direct URL to the object and set _target
-       // mapped.put("_target", url);
+       // Supply _target link back to this object
+       String handle = dso.getHandle();
+       if (null == handle)
+       {
+           log.warn("{} #{} has no handle -- location not set.",
+                   dso.getTypeText(), dso.getID());
+       }
+       else
+       {
+           String url = configurationService.getProperty("dspace.url")
+                   + "/handle/" + item.getHandle();
+           log.info("Supplying location: {}", url);
+           mapped.put("_target", url);
+       }

        return mapped;
    }

    /**
     * Provide a map from DSO metadata keys to EZID keys. This will drive the
     * generation of EZID metadata for the minting of new identifiers.
     *
     * @param aCrosswalk
     */
    @Required
    public void setCrosswalk(Map<String, String> aCrosswalk)
    {
        crosswalk = aCrosswalk;
    }

    /**
     * Provide a map from DSO metadata keys to classes which can transform their
     * values to something acceptable to EZID.
     *
     * @param transformMap
     */
    public void setCrosswalkTransform(Map<String, Transform> transformMap)
    {
        transforms = transformMap;
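The effect of the new _target mapping, with illustrative values (the dspace.url property value and the handle below are hypothetical, not from this diff):

    // With dspace.url = "https://repo.example.edu" and an item whose handle is
    // "123456789/42", the EZID metadata sent to DataCite now includes:
    //   _target = https://repo.example.edu/handle/123456789/42
    // so the minted DOI resolves directly back to the item's splash page.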
@@ -179,19 +179,19 @@ public class DOIOrganiser {

        if (line.hasOption('l'))
        {
-           organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVERED);
+           organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVED);
            organiser.list("registration", null, null, DOIIdentifierProvider.TO_BE_REGISTERED);
            organiser.list("update", null, null,
-                   DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
+                   DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
                    DOIIdentifierProvider.UPDATE_REGISTERED,
-                   DOIIdentifierProvider.UPDATE_RESERVERED);
+                   DOIIdentifierProvider.UPDATE_RESERVED);
            organiser.list("deletion", null, null, DOIIdentifierProvider.TO_BE_DELETED);
        }

        if (line.hasOption('s'))
        {
            TableRowIterator it = organiser
-                   .getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVERED);
+                   .getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVED);

            try {
                if (!it.hasNext())
@@ -244,8 +244,8 @@ public class DOIOrganiser {
        if (line.hasOption('u'))
        {
            TableRowIterator it = organiser.getDOIsByStatus(
-                   DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
-                   DOIIdentifierProvider.UPDATE_RESERVERED,
+                   DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
+                   DOIIdentifierProvider.UPDATE_RESERVED,
                    DOIIdentifierProvider.UPDATE_REGISTERED);

            try {
@@ -20,6 +20,7 @@ import org.dspace.content.Bundle;
 import org.dspace.content.DSpaceObject;
 import org.dspace.content.Item;
 import org.dspace.content.Site;
+import org.dspace.content.WorkspaceItem;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.event.Consumer;
@@ -52,6 +53,8 @@ public class RDFConsumer implements Consumer
        }

        int sType = event.getSubjectType();
+       log.debug(event.getEventTypeAsString() + " for "
+               + event.getSubjectTypeAsString() + ":" + event.getSubjectID());
        switch (sType)
        {
            case (Constants.BITSTREAM) :
@@ -100,7 +103,7 @@ public class RDFConsumer implements Consumer
        Bitstream bitstream = Bitstream.find(ctx, event.getSubjectID());
        if (bitstream == null)
        {
-           log.warn("Cannot find bitstream " + event.getSubjectID() + "! "
+           log.debug("Cannot find bitstream " + event.getSubjectID() + "! "
                    + "Ignoring, as it is likely it was deleted "
                    + "and we'll cover it by a REMOVE event on its bundle.");
            return;
@@ -111,6 +114,11 @@ public class RDFConsumer implements Consumer
        Item[] items = b.getItems();
        for (Item i : items)
        {
+           if (WorkspaceItem.findByItem(ctx, i) != null)
+           {
+               log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
+               continue;
+           }
            DSOIdentifier id = new DSOIdentifier(i, ctx);
            if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
            {
@@ -148,7 +156,7 @@ public class RDFConsumer implements Consumer
        Bundle bundle = Bundle.find(ctx, event.getSubjectID());
        if (bundle == null)
        {
-           log.warn("Cannot find bundle " + event.getSubjectID() + "! "
+           log.debug("Cannot find bundle " + event.getSubjectID() + "! "
                    + "Ignoring, as it is likely it was deleted "
                    + "and we'll cover it by a REMOVE event on its item.");
            return;
@@ -156,6 +164,11 @@ public class RDFConsumer implements Consumer
        Item[] items = bundle.getItems();
        for (Item i : items)
        {
+           if (WorkspaceItem.findByItem(ctx, i) != null)
+           {
+               log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
+               continue;
+           }
            DSOIdentifier id = new DSOIdentifier(i, ctx);
            if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
            {
@@ -216,14 +229,24 @@ public class RDFConsumer implements Consumer
        DSpaceObject dso = event.getSubject(ctx);
        if (dso == null)
        {
-           log.warn("Cannot find " + event.getSubjectTypeAsString() + " "
+           log.debug("Cannot find " + event.getSubjectTypeAsString() + " "
                    + event.getSubjectID() + "! " + "Ignoring, as it is "
                    + "likely it was deleted and we'll cover it by another "
                    + "event with the type REMOVE.");
            return;
        }
-       DSOIdentifier id = new DSOIdentifier(dso, ctx);

+       // ignore unfinished submissions here. Every unfinished submission
+       // has a workspace item. The item flag "in_archive" doesn't help us
+       // here as this is also set to false if a newer version was submitted.
+       if (dso instanceof Item
+               && WorkspaceItem.findByItem(ctx, (Item) dso) != null)
+       {
+           log.debug("Ignoring Item " + dso.getID() + " as a corresponding workspace item exists.");
+           return;
+       }
+
+       DSOIdentifier id = new DSOIdentifier(dso, ctx);
        // If an item gets withdrawn, a MODIFY event is fired. We have to
        // delete the item from the triple store instead of converting it.
        // We don't have to take care of reinstatements of items as they can
@@ -331,6 +331,7 @@ public class SolrLogger
        {
            doc1.addField("userAgent", request.getHeader("User-Agent"));
        }
+       doc1.addField("isBot",isSpiderBot);
        // Save the location information if valid, save the event without
        // location information if not valid
        if(locationService != null)
@@ -354,7 +355,7 @@ public class SolrLogger
            doc1.addField("city", location.city);
            doc1.addField("latitude", location.latitude);
            doc1.addField("longitude", location.longitude);
-           doc1.addField("isBot",isSpiderBot);
+
        }
@@ -416,6 +417,7 @@ public class SolrLogger
        {
            doc1.addField("userAgent", userAgent);
        }
+       doc1.addField("isBot",isSpiderBot);
        // Save the location information if valid, save the event without
        // location information if not valid
        if(locationService != null)
@@ -439,7 +441,7 @@ public class SolrLogger
            doc1.addField("city", location.city);
            doc1.addField("latitude", location.latitude);
            doc1.addField("longitude", location.longitude);
-           doc1.addField("isBot",isSpiderBot);
+
        }
@@ -1338,6 +1340,7 @@ public class SolrLogger
        //Upload the data in the csv files to our new solr core
        ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
        contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
+       contentStreamUpdateRequest.setParam("skip", "_version_");
        contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
        contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");
@@ -15,6 +15,7 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.Collections;
 import java.util.regex.Pattern;
 import javax.servlet.http.HttpServletRequest;
 import org.dspace.core.ConfigurationManager;
@@ -42,10 +43,10 @@ public class SpiderDetector {
    private static IPTable table = null;

    /** Collection of regular expressions to match known spiders' agents. */
-   private static List<Pattern> agents = new ArrayList<Pattern>();
+   private static List<Pattern> agents = Collections.synchronizedList(new ArrayList<Pattern>());

    /** Collection of regular expressions to match known spiders' domain names. */
-   private static List<Pattern> domains = new ArrayList<Pattern>();
+   private static List<Pattern> domains = Collections.synchronizedList(new ArrayList<Pattern>());

    /**
     * Utility method which reads lines from a file & returns them in a Set.
@@ -199,13 +200,15 @@ public class SpiderDetector {
        {
            // See if any agent patterns match
            if (null != agent)
            {
-               if (agents.isEmpty())
-                   loadPatterns("agents", agents);
-
+               synchronized(agents)
+               {
+                   if (agents.isEmpty())
+                       loadPatterns("agents", agents);
+               }
                for (Pattern candidate : agents)
                {
                    // prevent matcher() invocation from a null Pattern object
                    if (null != candidate && candidate.matcher(agent).find())
                    {
                        return true;
@@ -230,15 +233,15 @@ public class SpiderDetector {
            // No. See if any DNS names match
            if (null != hostname)
            {
-               if (domains.isEmpty())
-               {
-                   loadPatterns("domains", domains);
-               }
+               synchronized(domains)
+               {
+                   if (domains.isEmpty())
+                       loadPatterns("domains", domains);
+               }

                for (Pattern candidate : domains)
                {
                    // prevent matcher() invocation from a null Pattern object
                    if (null != candidate && candidate.matcher(hostname).find())
                    {
                        return true;
                    }
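One caveat worth noting about this change (a general property of Collections.synchronizedList, not something this diff alters): iteration over a synchronized wrapper is not itself atomic, so the for-each loops above can still observe the list mid-load unless the caller also holds the list's monitor. A minimal sketch of the fully guarded variant, per the Collections.synchronizedList javadoc:

    synchronized (agents)
    {
        if (agents.isEmpty())
            loadPatterns("agents", agents);
        // Iterating inside the same monitor guarantees the loop sees a
        // fully loaded, unchanging list.
        for (Pattern candidate : agents)
        {
            if (null != candidate && candidate.matcher(agent).find())
                return true;
        }
    }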
@@ -15,7 +15,6 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
-import java.sql.SQLWarning;
 import java.sql.Statement;
 import java.sql.Time;
 import java.sql.Timestamp;
@@ -35,8 +34,6 @@ import javax.sql.DataSource;
 import org.apache.commons.lang.StringUtils;
 import org.dspace.core.ConfigurationManager;
-import org.dspace.core.Context;
-import org.flywaydb.core.Flyway;
 import org.flywaydb.core.api.MigrationInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -881,22 +878,22 @@ public class DatabaseManager
    }

    /**
-    * Return the canonical name for a table.
+    * Return the canonical name for a database object.
     *
-    * @param table
-    *            The name of the table.
-    * @return The canonical name of the table.
+    * @param db_object
+    *            The name of the database object.
+    * @return The canonical name of the database object.
     */
-   static String canonicalize(String table)
+   static String canonicalize(String db_object)
    {
-       // Oracle expects upper-case table names
+       // Oracle expects upper-case table names, schemas, etc.
        if (isOracle)
        {
-           return (table == null) ? null : table.toUpperCase();
+           return (db_object == null) ? null : db_object.toUpperCase();
        }

        // default database postgres wants lower-case table names
-       return (table == null) ? null : table.toLowerCase();
+       return (db_object == null) ? null : db_object.toLowerCase();
    }

    ////////////////////////////////////////
@@ -1237,10 +1234,6 @@ public class DatabaseManager

        try
        {
-           String schema = ConfigurationManager.getProperty("db.schema");
-           if(StringUtils.isBlank(schema)){
-               schema = null;
-           }
            String catalog = null;

            int dotIndex = table.indexOf('.');
@@ -1254,6 +1247,9 @@ public class DatabaseManager

            connection = getConnection();

+           // Get current database schema name
+           String schema = DatabaseUtils.getSchemaName(connection);
+
            DatabaseMetaData metadata = connection.getMetaData();
            Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>();
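A quick illustration of canonicalize() after the rename (behavior is unchanged, only the naming is broader; the values below are illustrative):

    // On Oracle:      canonicalize("metadatavalue") -> "METADATAVALUE"
    // On PostgreSQL:  canonicalize("METADATAVALUE") -> "metadatavalue"
    // The same rule now applies to any database object name:
    // tables, schemas, constraints, and so on.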
@@ -873,8 +873,10 @@ public class DatabaseUtils
     * Get the Database Schema Name in use by this Connection, so that it can
     * be used to limit queries in other methods (e.g. tableExists()).
     * <P>
+    * For PostgreSQL, schema is simply what is configured in db.schema or "public".
+    * For Oracle, schema is actually the database *USER* or owner.
     * NOTE: Once we upgrade to using Apache Commons DBCP / Pool version 2.0,
     * this method WILL BE REMOVED in favor of java.sql.Connection's new
     * "getSchema()" method.
     * http://docs.oracle.com/javase/7/docs/api/java/sql/Connection.html#getSchema()
     *
     * @param connection
     *            Current Database Connection
@@ -886,27 +888,29 @@ public class DatabaseUtils
        String schema = null;
        DatabaseMetaData meta = connection.getMetaData();

-       // Determine our DB type
-       String dbType = DatabaseManager.findDbKeyword(meta);
+       // Check the configured "db.schema" FIRST for the value configured there
+       schema = DatabaseManager.canonicalize(ConfigurationManager.getProperty("db.schema"));

-       if(dbType.equals(DatabaseManager.DBMS_POSTGRES))
+       // If unspecified, determine "sane" defaults based on DB type
+       if(StringUtils.isBlank(schema))
        {
-           // Get the schema name from "db.schema"
-           schema = ConfigurationManager.getProperty("db.schema");
-
-           // If unspecified, default schema is "public"
-           if(StringUtils.isBlank(schema)){
+           String dbType = DatabaseManager.findDbKeyword(meta);
+
+           if(dbType.equals(DatabaseManager.DBMS_POSTGRES))
+           {
+               // For PostgreSQL, the default schema is named "public"
+               // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html
                schema = "public";
            }
-       }
-       else if (dbType.equals(DatabaseManager.DBMS_ORACLE))
-       {
-           // Schema is actually the user account
-           // See: http://stackoverflow.com/a/13341390
-           schema = meta.getUserName();
-       }
-       else
-           schema = null;
+           else if (dbType.equals(DatabaseManager.DBMS_ORACLE))
+           {
+               // For Oracle, default schema is actually the user account
+               // See: http://stackoverflow.com/a/13341390
+               schema = meta.getUserName();
+           }
+           else
+               schema = null;
+       }

        return schema;
    }
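To make the new lookup order concrete, a hedged summary of the code above (the property value is illustrative):

    // db.schema = "dspace"        -> "dspace" canonicalized ("DSPACE" on Oracle,
    //                                "dspace" on PostgreSQL)
    // db.schema unset, PostgreSQL -> "public"
    // db.schema unset, Oracle     -> the connected user name (schema == user on Oracle)
    // db.schema unset, other DBMS -> null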
@@ -23,18 +23,17 @@ import org.apache.commons.lang.StringUtils;
 public class MigrationUtils
 {
    /**
-    * Drop a given Database Constraint (based on the current database type).
+    * Drop a given Database Column Constraint (based on the current database type).
     * Returns a "checksum" for this migration which can be used as part of
     * a Flyway Java migration
     *
     * @param connection the current Database connection
     * @param tableName the name of the table the constraint applies to
     * @param columnName the name of the column the constraint applies to
-    * @param constraintSuffix Only used for PostgreSQL, whose constraint naming convention depends on a suffix (key, fkey, etc)
     * @return migration checksum as an Integer
     * @throws SQLException if a database error occurs
     */
-   public static Integer dropDBConstraint(Connection connection, String tableName, String columnName, String constraintSuffix)
+   public static Integer dropDBConstraint(Connection connection, String tableName, String columnName)
        throws SQLException
    {
        Integer checksum = -1;
@@ -48,13 +47,17 @@ public class MigrationUtils
        String dbtype = DatabaseManager.findDbKeyword(meta);
        String constraintName = null;
        String constraintNameSQL = null;
+       String schemaName = null;
        switch(dbtype)
        {
            case DatabaseManager.DBMS_POSTGRES:
-               // In Postgres, constraints are always named:
-               // {tablename}_{columnname(s)}_{suffix}
-               // see: http://stackoverflow.com/a/4108266/3750035
-               constraintName = StringUtils.lowerCase(tableName) + "_" + StringUtils.lowerCase(columnName) + "_" + StringUtils.lowerCase(constraintSuffix);
+               // In Postgres, column constraints are listed in the "information_schema.key_column_usage" view
+               // See: http://www.postgresql.org/docs/9.4/static/infoschema-key-column-usage.html
+               constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " +
+                                   "FROM information_schema.key_column_usage " +
+                                   "WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ?";
+               // For Postgres, we need to limit by the schema as well
+               schemaName = DatabaseUtils.getSchemaName(connection);
                break;
            case DatabaseManager.DBMS_ORACLE:
                // In Oracle, constraints are listed in the USER_CONS_COLUMNS table
@@ -72,35 +75,46 @@ public class MigrationUtils
                throw new SQLException("DBMS " + dbtype + " is unsupported in this migration.");
        }

-       // If we have a SQL query to run for the constraint name, then run it
-       if (constraintNameSQL!=null)
-       {
-           // Run the query to obtain the constraint name, passing it the parameters
-           PreparedStatement statement = connection.prepareStatement(constraintNameSQL);
-           statement.setString(1, StringUtils.upperCase(tableName));
-           statement.setString(2, StringUtils.upperCase(columnName));
-           try
+       // Run the query to obtain the constraint name, passing it the parameters
+       PreparedStatement statement = connection.prepareStatement(constraintNameSQL);
+       statement.setString(1, DatabaseUtils.canonicalize(connection, tableName));
+       statement.setString(2, DatabaseUtils.canonicalize(connection, columnName));
+       // Also limit by database schema, if a schemaName has been set (only needed for PostgreSQL)
+       if(schemaName!=null && !schemaName.isEmpty())
+       {
+           statement.setString(3, DatabaseUtils.canonicalize(connection, schemaName));
+       }
+       try
        {
            ResultSet results = statement.executeQuery();
            if(results.next())
            {
                constraintName = results.getString("CONSTRAINT_NAME");
            }
            results.close();
        }
        finally
        {
            statement.close();
        }

        // As long as we have a constraint name, drop it
        if (constraintName!=null && !constraintName.isEmpty())
        {
-           // This drop constraint SQL should be the same in all databases
-           String dropConstraintSQL = "ALTER TABLE " + tableName + " DROP CONSTRAINT " + constraintName;
-
-           PreparedStatement statement = connection.prepareStatement(dropConstraintSQL);
+           // Canonicalize the constraintName
+           constraintName = DatabaseUtils.canonicalize(connection, constraintName);
+           // If constraintName starts with a $, surround with double quotes
+           // (This is mostly for PostgreSQL, which sometimes names constraints $1, $2, etc.)
+           if(constraintName.startsWith("$"))
+           {
+               constraintName = "\"" + constraintName + "\"";
+           }
+
+           // This drop constraint SQL should be the same in all databases
+           String dropConstraintSQL = "ALTER TABLE " + DatabaseUtils.canonicalize(connection, tableName) +
+                                      " DROP CONSTRAINT " + constraintName;
+
+           statement = connection.prepareStatement(dropConstraintSQL);
            try
            {
                statement.execute();
@@ -13,8 +13,6 @@ import java.sql.SQLException;

 import org.dspace.storage.rdbms.MigrationUtils;
 import org.flywaydb.core.api.migration.MigrationChecksumProvider;
 import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 /**
  * This class is in support of the "V1.4__Upgrade_to_DSpace_1.4_schema.sql"
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
 public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
    implements JdbcMigration, MigrationChecksumProvider
 {
-   /** logging category */
-   private static final Logger log = LoggerFactory.getLogger(V1_3_9__Drop_constraint_for_DSpace_1_4_schema.class);
-
    /* The checksum to report for this migration (when successful) */
    private int checksum = -1;

@@ -57,7 +52,7 @@ public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
        throws IOException, SQLException
    {
        // Drop the constraint associated with "name" column of "community"
-       checksum = MigrationUtils.dropDBConstraint(connection, "community", "name", "key");
+       checksum = MigrationUtils.dropDBConstraint(connection, "community", "name");
    }

    /**
@@ -13,8 +13,6 @@ import java.sql.SQLException;

 import org.dspace.storage.rdbms.MigrationUtils;
 import org.flywaydb.core.api.migration.MigrationChecksumProvider;
 import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 /**
  * This class is in support of the "V1.6__Upgrade_to_DSpace_1.6_schema.sql"
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
 public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
    implements JdbcMigration, MigrationChecksumProvider
 {
-   /** logging category */
-   private static final Logger log = LoggerFactory.getLogger(V1_5_9__Drop_constraint_for_DSpace_1_6_schema.class);
-
    /* The checksum to report for this migration (when successful) */
    private int checksum = -1;

@@ -57,11 +52,11 @@ public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
        throws IOException, SQLException
    {
        // Drop the constraint associated with "collection_id" column of "community2collection" table
-       int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id", "fkey");
+       int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id");
        // Drop the constraint associated with "child_comm_id" column of "community2community" table
-       int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id", "fkey");
+       int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id");
        // Drop the constraint associated with "item_id" column of "collection2item" table
-       int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id", "fkey");
+       int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id");

        // Checksum will just be the sum of those three return values
        checksum = return1 + return2 + return3;
@@ -13,8 +13,6 @@ import java.sql.SQLException;

 import org.dspace.storage.rdbms.MigrationUtils;
 import org.flywaydb.core.api.migration.MigrationChecksumProvider;
 import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

 /**
  * This class is in support of the DS-1582 Metadata for All Objects feature.
@@ -37,30 +35,27 @@ import org.slf4j.LoggerFactory;
  * <P>
  * This class represents a Flyway DB Java Migration
  * http://flywaydb.org/documentation/migration/java.html
- *
  *
  * @author Tim Donohue
  */
 public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint
    implements JdbcMigration, MigrationChecksumProvider
 {
-   /** logging category */
-   private static final Logger log = LoggerFactory.getLogger(V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.class);
-
    /* The checksum to report for this migration (when successful) */
    private int checksum = -1;

    /**
     * Actually migrate the existing database
     * @param connection
     */
    @Override
    public void migrate(Connection connection)
        throws IOException, SQLException
    {
        // Drop the constraint associated with "item_id" column of "metadatavalue"
-       checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id", "fkey");
+       checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id");
    }

    /**
     * Return the checksum to be associated with this Migration
     * in the Flyway database table (schema_version).
@@ -98,7 +98,7 @@ public class PubmedService

    public List<Record> search(String query) throws IOException, HttpException
    {
-       List<Record> results = null;
+       List<Record> results = new ArrayList<>();
        if (!ConfigurationManager.getBooleanProperty(SubmissionLookupService.CFG_MODULE, "remoteservice.demo"))
        {
            HttpGet method = null;
@@ -352,7 +352,7 @@ public class UploadWithEmbargoStep extends UploadStep
        String fileDescription = (String) request.getAttribute(param + "-description");
        if(fileDescription==null ||fileDescription.length()==0)
        {
-           request.getParameter("description");
+           fileDescription = request.getParameter("description");
        }

        // if information wasn't passed by User Interface, we had a problem
dspace-api/src/main/java/org/dspace/util/SolrImportExport.java (new file, 719 lines)
@@ -0,0 +1,719 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.util;

import org.apache.commons.cli.*;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.CoreAdminResponse;
import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.FacetParams;
import org.dspace.core.ConfigurationManager;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URL;
import java.nio.file.FileStore;
import java.text.*;
import java.util.*;

/**
 * Utility class to export, clear and import Solr indexes.
 * @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
 */
public class SolrImportExport
{
    private static final DateFormat SOLR_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    private static final DateFormat SOLR_DATE_FORMAT_NO_MS = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    private static final DateFormat EXPORT_DATE_FORMAT = new SimpleDateFormat("yyyy-MM");

    static
    {
        SOLR_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
        EXPORT_DATE_FORMAT.setTimeZone(TimeZone.getDefault());
    }

    private static final String ACTION_OPTION = "a";
    private static final String CLEAR_OPTION = "c";
    private static final String DIRECTORY_OPTION = "d";
    private static final String HELP_OPTION = "h";
    private static final String INDEX_NAME_OPTION = "i";
    private static final String KEEP_OPTION = "k";
    private static final String LAST_OPTION = "l";

    public static final int ROWS_PER_FILE = 10_000;

    private static final Logger log = Logger.getLogger(SolrImportExport.class);

    /**
     * Entry point for command-line invocation
     * @param args command-line arguments; see help for description
     * @throws ParseException if the command-line arguments cannot be parsed
     */
    public static void main(String[] args) throws ParseException
    {
        CommandLineParser parser = new PosixParser();
        Options options = makeOptions();

        try
        {
            CommandLine line = parser.parse(options, args);
            if (line.hasOption(HELP_OPTION))
            {
                printHelpAndExit(options, 0);
            }

            if (!line.hasOption(INDEX_NAME_OPTION))
            {
                System.err.println("This command requires the index-name option but none was present.");
                printHelpAndExit(options, 1);
            }
            String[] indexNames = line.getOptionValues(INDEX_NAME_OPTION);

            String directoryName = makeDirectoryName(line.getOptionValue(DIRECTORY_OPTION));

            String action = line.getOptionValue(ACTION_OPTION, "export");
            if ("import".equals(action))
            {
                for (String indexName : indexNames)
                {
                    File importDir = new File(directoryName);
                    if (!importDir.exists() || !importDir.canRead())
                    {
                        System.err.println("Import directory " + directoryName
                                + " doesn't exist or is not readable by the current user. Not importing index "
                                + indexName);
                        continue; // skip this index
                    }
                    try
                    {
                        String solrUrl = makeSolrUrl(indexName);
                        boolean clear = line.hasOption(CLEAR_OPTION);
                        importIndex(indexName, importDir, solrUrl, clear, clear);
                    }
                    catch (IOException | SolrServerException | SolrImportExportException e)
                    {
                        System.err.println("Problem encountered while trying to import index " + indexName + ".");
                        e.printStackTrace(System.err);
                    }
                }
            }
            else if ("export".equals(action))
            {
                for (String indexName : indexNames)
                {
                    String lastValue = line.getOptionValue(LAST_OPTION);
                    File exportDir = new File(directoryName);
                    if (exportDir.exists() && !exportDir.canWrite())
                    {
                        System.err.println("Export directory " + directoryName
                                + " is not writable by the current user. Not exporting index "
                                + indexName);
                        continue;
                    }

                    if (!exportDir.exists())
                    {
                        boolean created = exportDir.mkdirs();
                        if (!created)
                        {
                            System.err.println("Export directory " + directoryName
                                    + " could not be created. Not exporting index " + indexName);
                            continue; // only skip this index if the directory could not be created
                        }
                    }

                    try
                    {
                        String solrUrl = makeSolrUrl(indexName);
                        String timeField = makeTimeField(indexName);
                        exportIndex(indexName, exportDir, solrUrl, timeField, lastValue);
                    }
                    catch (SolrServerException | IOException | SolrImportExportException e)
                    {
                        System.err.println("Problem encountered while trying to export index " + indexName + ".");
                        e.printStackTrace(System.err);
                    }
                }
            }
            else if ("reindex".equals(action))
            {
                for (String indexName : indexNames)
                {
                    try {
                        boolean keepExport = line.hasOption(KEEP_OPTION);
                        reindex(indexName, directoryName, keepExport);
                    } catch (IOException | SolrServerException | SolrImportExportException e) {
                        e.printStackTrace();
                    }
                }
            }
            else
            {
                System.err.println("Unknown action " + action + "; must be import, export or reindex.");
                printHelpAndExit(options, 1);
            }
        }
        catch (ParseException e)
        {
            System.err.println("Cannot read command options");
            printHelpAndExit(options, 1);
        }
    }

    private static Options makeOptions() {
        Options options = new Options();
        options.addOption(ACTION_OPTION, "action", true, "The action to perform: import, export or reindex. Default: export.");
        options.addOption(CLEAR_OPTION, "clear", false, "When importing, also clear the index first. Ignored when action is export or reindex.");
        options.addOption(DIRECTORY_OPTION, "directory", true,
                "The absolute path for the directory to use for import or export. If omitted, [dspace]/solr-export is used.");
        options.addOption(HELP_OPTION, "help", false, "Get help on options for this command.");
        options.addOption(INDEX_NAME_OPTION, "index-name", true,
                "The names of the indexes to process. At least one is required. Available indexes are: authority, statistics.");
        options.addOption(KEEP_OPTION, "keep", false, "When reindexing, keep the contents of the data export directory." +
                " By default, the contents of this directory will be deleted once the reindex has finished." +
                " Ignored when action is export or import.");
        options.addOption(LAST_OPTION, "last", true, "When exporting, export records from the last [timeperiod] only." +
                " This can be one of: 'd' (beginning of yesterday through to now);" +
                " 'm' (beginning of the previous month through to end of the previous month);" +
                " a number, in which case the last [number] of days are exported, through to now (use 0 for today's data)." +
                " Date calculation is done in UTC. If omitted, all documents are exported.");
        return options;
    }
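A hedged usage sketch of the command line assembled by makeOptions() (the core names and directory path are illustrative, not prescribed by this file):

    // Export the last month of the statistics core to a custom directory:
    SolrImportExport.main(new String[] {
            "-i", "statistics", "-a", "export", "-l", "m", "-d", "/dspace/solr-export"});
    // Rebuild the authority core in place, keeping the intermediate export files:
    SolrImportExport.main(new String[] {"-i", "authority", "-a", "reindex", "-k"});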
/**
|
||||
* Reindexes the specified core
|
||||
*
|
||||
* @param indexName the name of the core to reindex
|
||||
* @param exportDirName the name of the directory to use for export. If this directory doesn't exist, it will be created.
|
||||
* @param keepExport whether to keep the contents of the exportDir after the reindex. If keepExport is false and the
|
||||
* export directory was created by this method, the export directory will be deleted at the end of the reimport.
|
||||
*/
|
||||
private static void reindex(String indexName, String exportDirName, boolean keepExport)
|
||||
throws IOException, SolrServerException, SolrImportExportException {
|
||||
String tempIndexName = indexName + "-temp";
|
||||
|
||||
String origSolrUrl = makeSolrUrl(indexName);
|
||||
String baseSolrUrl = StringUtils.substringBeforeLast(origSolrUrl, "/"); // need to get non-core solr URL
|
||||
String tempSolrUrl = baseSolrUrl + "/" + tempIndexName;
|
||||
|
||||
String solrInstanceDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" + File.separator + indexName;
|
||||
// the [dspace]/solr/[indexName]/conf directory needs to be available on the local machine for this to work
|
||||
// -- we need access to the schema.xml and solrconfig.xml file, plus files referenced from there
|
||||
// if this directory can't be found, output an error message and skip this index
|
||||
File solrInstance = new File(solrInstanceDir);
|
||||
if (!solrInstance.exists() || !solrInstance.canRead() || !solrInstance.isDirectory())
|
||||
{
|
||||
throw new SolrImportExportException("Directory " + solrInstanceDir + "/conf/ doesn't exist or isn't readable." +
|
||||
" The reindexing process requires the Solr configuration directory for this index to be present on the local machine" +
|
||||
" even if Solr is running on a different host. Not reindexing index " + indexName);
|
||||
}
|
||||
|
||||
String timeField = makeTimeField(indexName);
|
||||
|
||||
// Ensure the export directory exists and is writable
|
||||
File exportDir = new File(exportDirName);
|
||||
boolean createdExportDir = exportDir.mkdirs();
|
||||
if (!createdExportDir && !exportDir.exists())
|
||||
{
|
||||
throw new SolrImportExportException("Could not create export directory " + exportDirName);
|
||||
}
|
||||
if (!exportDir.canWrite())
|
||||
{
|
||||
throw new SolrImportExportException("Can't write to export directory " + exportDirName);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
HttpSolrServer adminSolr = new HttpSolrServer(baseSolrUrl);
|
||||
|
||||
// try to find out size of core and compare with free space in export directory
|
||||
CoreAdminResponse status = CoreAdminRequest.getStatus(indexName, adminSolr);
|
||||
Object coreSizeObj = status.getCoreStatus(indexName).get("sizeInBytes");
|
||||
long coreSize = coreSizeObj != null ? Long.valueOf(coreSizeObj.toString()) : -1;
|
||||
long usableExportSpace = exportDir.getUsableSpace();
|
||||
if (coreSize >= 0 && usableExportSpace < coreSize)
|
||||
{
|
||||
System.err.println("Not enough space in export directory " + exportDirName
|
||||
+ "; need at least as much space as the index ("
|
||||
+ FileUtils.byteCountToDisplaySize(coreSize)
|
||||
+ ") but usable space in export directory is only "
|
||||
+ FileUtils.byteCountToDisplaySize(usableExportSpace)
|
||||
+ ". Not continuing with reindex, please use the " + DIRECTORY_OPTION
|
||||
+ " option to specify an alternative export directy with sufficient space.");
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a temp directory to store temporary core data
|
||||
File tempDataDir = new File(ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator + "solr-data");
|
||||
boolean createdTempDataDir = tempDataDir.mkdirs();
|
||||
if (!createdTempDataDir && !tempDataDir.exists())
|
||||
{
|
||||
throw new SolrImportExportException("Could not create temporary data directory " + tempDataDir.getCanonicalPath());
|
||||
}
|
||||
if (!tempDataDir.canWrite())
|
||||
{
|
||||
throw new SolrImportExportException("Can't write to temporary data directory " + tempDataDir.getCanonicalPath());
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
// create a temporary core to hold documents coming in during the reindex
|
||||
CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create();
|
||||
createRequest.setInstanceDir(solrInstanceDir);
|
||||
createRequest.setDataDir(tempDataDir.getCanonicalPath());
|
||||
createRequest.setCoreName(tempIndexName);
|
||||
|
||||
createRequest.process(adminSolr).getStatus();
|
||||
}
|
||||
catch (SolrServerException e)
|
||||
{
|
||||
// try to continue -- it may just be that the core already existed from a previous, failed attempt
|
||||
System.err.println("Caught exception when trying to create temporary core: " + e.getMessage() + "; trying to recover.");
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
|
||||
// swap actual core with temporary one
|
||||
CoreAdminRequest swapRequest = new CoreAdminRequest();
|
||||
swapRequest.setCoreName(indexName);
|
||||
swapRequest.setOtherCoreName(tempIndexName);
|
||||
swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
|
||||
swapRequest.process(adminSolr);
|
||||
|
||||
try
|
||||
{
|
||||
// export from the actual core (from temp core name, actual data dir)
|
||||
exportIndex(indexName, exportDir, tempSolrUrl, timeField);
|
||||
|
||||
// clear actual core (temp core name, clearing actual data dir) & import
|
||||
importIndex(indexName, exportDir, tempSolrUrl, true, true);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// we ran into some problems with the export/import -- keep going to try and restore the solr cores
|
||||
System.err.println("Encountered problem during reindex: " + e.getMessage() + ", will attempt to restore Solr cores");
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
|
||||
// commit changes
|
||||
HttpSolrServer origSolr = new HttpSolrServer(origSolrUrl);
|
||||
origSolr.commit();
|
||||
|
||||
// swap back (statistics now going to actual core name in actual data dir)
|
||||
swapRequest = new CoreAdminRequest();
|
||||
swapRequest.setCoreName(tempIndexName);
|
||||
swapRequest.setOtherCoreName(indexName);
|
||||
swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
|
||||
swapRequest.process(adminSolr);
|
||||
|
||||
// export all docs from now-temp core into export directory -- this won't cause name collisions with the actual export
|
||||
// because the core name for the temporary export has -temp in it while the actual core doesn't
|
||||
exportIndex(tempIndexName, exportDir, tempSolrUrl, timeField);
|
||||
// ...and import them into the now-again-actual core *without* clearing
|
||||
importIndex(tempIndexName, exportDir, origSolrUrl, false, true);
|
||||
|
||||
// commit changes
|
||||
origSolr.commit();
|
||||
|
||||
// unload now-temp core (temp core name)
|
||||
CoreAdminRequest.unloadCore(tempIndexName, false, false, adminSolr);
|
||||
|
||||
// clean up temporary data dir if this method created it
|
||||
if (createdTempDataDir && tempDataDir.exists())
|
||||
{
|
||||
FileUtils.deleteDirectory(tempDataDir);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
// clean up export dir if appropriate
|
||||
if (!keepExport && createdExportDir && exportDir.exists())
|
||||
{
|
||||
FileUtils.deleteDirectory(exportDir);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Exports all documents in the given index to the specified target directory in batches of #ROWS_PER_FILE.
|
||||
* See #makeExportFilename for the file names that are generated.
|
||||
*
|
||||
* @param indexName The index to export.
|
||||
     * @param toDir     The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
     * @param solrUrl   The solr URL for the index to export. Must not be null.
     * @param timeField The time field to use for sorting the export. Must not be null.
     * @throws SolrServerException       if there is a problem with exporting the index.
     * @throws IOException               if there is a problem creating the files or communicating with Solr.
     * @throws SolrImportExportException if the export cannot be set up (e.g. a blank solr URL).
     */
    public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField)
            throws SolrServerException, SolrImportExportException, IOException {
        exportIndex(indexName, toDir, solrUrl, timeField, null);
    }

    /**
     * Import previously exported documents (or externally created CSV files that have the appropriate structure) into the specified index.
     *
     * @param indexName the index to import.
     * @param fromDir   the source directory. Must exist and be readable.
     *                  The importer will look for files whose name starts with <pre>indexName</pre>
     *                  and ends with .csv (to match what is generated by #makeExportFilename).
     * @param solrUrl   The solr URL for the index to import. Must not be null.
     * @param clear     if true, clear the index before importing.
     * @param overwrite if true, skip the _version_ field on import to disable Solr's optimistic concurrency functionality.
     * @throws IOException               if there is a problem reading the files or communicating with Solr.
     * @throws SolrServerException       if there is a problem communicating with Solr.
     * @throws SolrImportExportException if the solr URL is blank or the source directory is not usable.
     */
    public static void importIndex(final String indexName, File fromDir, String solrUrl, boolean clear, boolean overwrite)
            throws IOException, SolrServerException, SolrImportExportException
    {
        if (StringUtils.isBlank(solrUrl))
        {
            throw new SolrImportExportException("Could not construct solr URL for index " + indexName + ", aborting import.");
        }

        if (!fromDir.exists() || !fromDir.canRead())
        {
            throw new SolrImportExportException("Source directory " + fromDir
                    + " doesn't exist or isn't readable, aborting import of index "
                    + indexName);
        }

        HttpSolrServer solr = new HttpSolrServer(solrUrl);

        // must get multi-valued fields before clearing
        List<String> multivaluedFields = getMultiValuedFields(solr);

        if (clear)
        {
            clearIndex(solrUrl);
        }

        File[] files = fromDir.listFiles(new FilenameFilter()
        {
            @Override
            public boolean accept(File dir, String name)
            {
                return name.startsWith(indexName) && name.endsWith(".csv");
            }
        });

        if (files == null || files.length == 0)
        {
            log.warn("No export files found in directory " + fromDir.getCanonicalPath() + " for index " + indexName);
            return;
        }

        Arrays.sort(files);

        for (File file : files)
        {
            log.info("Importing file " + file.getCanonicalPath());
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
            if (overwrite)
            {
                contentStreamUpdateRequest.setParam("skip", "_version_");
            }
            for (String mvField : multivaluedFields) {
                contentStreamUpdateRequest.setParam("f." + mvField + ".split", "true");
                contentStreamUpdateRequest.setParam("f." + mvField + ".escape", "\\");
            }
            contentStreamUpdateRequest.setParam("stream.contentType", "text/csv;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(file, "text/csv;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        solr.commit(true, true);
    }

    /**
     * Determine the names of all multi-valued fields from the data in the index.
     *
     * @param solr the solr server to query.
     * @return A list containing all multi-valued fields, or an empty list if none are found / there aren't any.
     */
    private static List<String> getMultiValuedFields(HttpSolrServer solr)
    {
        List<String> result = new ArrayList<>();
        try
        {
            LukeRequest request = new LukeRequest();
            // this needs to be a non-schema request, otherwise we'll miss dynamic fields
            LukeResponse response = request.process(solr);
            Map<String, LukeResponse.FieldInfo> fields = response.getFieldInfo();
            for (LukeResponse.FieldInfo info : fields.values())
            {
                if (info.getSchema().contains(FieldFlag.MULTI_VALUED.getAbbreviation() + ""))
                {
                    result.add(info.getName());
                }
            }
        }
        catch (IOException | SolrServerException e)
        {
            log.fatal("Cannot determine which fields are multi-valued: " + e.getMessage(), e);
        }
        return result;
    }

    /**
     * Remove all documents from the Solr index with the given URL, then commit and optimise the index.
     *
     * @param solrUrl URL of the Solr core to clear.
     * @throws IOException         if there is a problem in communicating with Solr.
     * @throws SolrServerException if there is a problem in communicating with Solr.
     */
    public static void clearIndex(String solrUrl) throws IOException, SolrServerException
    {
        HttpSolrServer solr = new HttpSolrServer(solrUrl);
        solr.deleteByQuery("*:*");
        solr.commit();
        solr.optimize();
    }

    /**
     * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
     * See #makeExportFilename for the file names that are generated.
     *
     * @param indexName The index to export.
     * @param toDir     The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
     * @param solrUrl   The solr URL for the index to export. Must not be null.
     * @param timeField The time field to use for sorting the export. Must not be null.
     * @param fromWhen  Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
     * @throws SolrServerException       if there is a problem with exporting the index.
     * @throws IOException               if there is a problem creating the files or communicating with Solr.
     * @throws SolrImportExportException if the export cannot be set up or an export file cannot be created.
     */
    public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
            throws SolrServerException, IOException, SolrImportExportException
    {
        if (StringUtils.isBlank(solrUrl))
        {
            throw new SolrImportExportException("Could not construct solr URL for index " + indexName + ", aborting export.");
        }

        if (!toDir.exists() || !toDir.canWrite())
        {
            throw new SolrImportExportException("Target directory " + toDir
                    + " doesn't exist or is not writable, aborting export of index "
                    + indexName);
        }

        HttpSolrServer solr = new HttpSolrServer(solrUrl);

        SolrQuery query = new SolrQuery("*:*");
        if (StringUtils.isNotBlank(fromWhen))
        {
            String lastValueFilter = makeFilterQuery(timeField, fromWhen);
            if (StringUtils.isNotBlank(lastValueFilter))
            {
                query.addFilterQuery(lastValueFilter);
            }
        }

        query.setRows(0);
        query.setGetFieldStatistics(timeField);
        Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
        if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
            log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
            return;
        }
        FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
        if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
            log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
            return;
        }
        Date earliestTimestamp = (Date) timeFieldInfo.getMin();

        query.setGetFieldStatistics(false);
        query.clearSorts();
        query.setRows(0);
        query.setFacet(true);
        query.add(FacetParams.FACET_RANGE, timeField);
        query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
        query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
        query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
        query.setFacetMinCount(1);

        List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();

        for (RangeFacet.Count monthFacet : monthFacets) {
            Date monthStartDate;
            String monthStart = monthFacet.getValue();
            try
            {
                monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
            }
            catch (java.text.ParseException e)
            {
                throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart, e);
            }
            int docsThisMonth = monthFacet.getCount();

            SolrQuery monthQuery = new SolrQuery("*:*");
            monthQuery.setRows(ROWS_PER_FILE);
            monthQuery.set("wt", "csv");
            monthQuery.set("fl", "*");

            monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]");

            for (int i = 0; i < docsThisMonth; i += ROWS_PER_FILE)
            {
                monthQuery.setStart(i);
                URL url = new URL(solrUrl + "/select?" + monthQuery.toString());

                File file = new File(toDir.getCanonicalPath(), makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
                if (file.createNewFile())
                {
                    FileUtils.copyURLToFile(url, file);
                    log.info("Exported batch " + i + " to " + file.getCanonicalPath());
                }
                else
                {
                    throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
                            + " while exporting index " + indexName
                            + ", month " + monthStart
                            + ", batch " + i);
                }
            }
        }
    }

    /**
     * Return a filter query that represents the export date range passed in as lastValue.
     *
     * @param timeField the time field to use for the date range.
     * @param lastValue the requested date range, see options for acceptable values.
     * @return a filter query representing the date range, or null if no suitable date range can be created.
     */
    private static String makeFilterQuery(String timeField, String lastValue) {
        if ("m".equals(lastValue))
        {
            // export data from the previous month
            return timeField + ":[NOW/MONTH-1MONTH TO NOW/MONTH]";
        }

        int days;
        if ("d".equals(lastValue))
        {
            days = 1;
        }
        else
        {
            // other acceptable value: a number, specifying how many days back to export
            days = Integer.valueOf(lastValue); // TODO check value?
        }
        return timeField + ":[NOW/DAY-" + days + "DAYS TO " + SOLR_DATE_FORMAT.format(new Date()) + "]";
    }

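    For illustration, a sketch of the filter queries this method produces for the documented values, assuming a time field named "time" (the upper bound of the day-based ranges is the current timestamp as rendered by SOLR_DATE_FORMAT):

        makeFilterQuery("time", "m"); // time:[NOW/MONTH-1MONTH TO NOW/MONTH]
        makeFilterQuery("time", "d"); // time:[NOW/DAY-1DAYS TO <now>]
        makeFilterQuery("time", "7"); // time:[NOW/DAY-7DAYS TO <now>]
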
    /**
     * Return the specified directory name or fall back to a default value.
     *
     * @param directoryValue a specific directory name. Optional.
     * @return directoryValue if given as a non-blank string. A default directory otherwise.
     */
    private static String makeDirectoryName(String directoryValue)
    {
        if (StringUtils.isNotBlank(directoryValue))
        {
            return directoryValue;
        }
        return ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr-export" + File.separator;
    }

    /**
     * Creates a filename for the export batch.
     *
     * @param indexName    The name of the index being exported.
     * @param exportStart  The start timestamp of the export.
     * @param totalRecords The total number of records in the export.
     * @param index        The index of the current batch.
     * @return A file name that is appropriate to use for exporting the batch of data described by the parameters.
     */
    private static String makeExportFilename(String indexName, Date exportStart, long totalRecords, int index)
    {
        String exportFileNumber = "";
        if (totalRecords > ROWS_PER_FILE) {
            exportFileNumber = StringUtils.leftPad("" + (index / ROWS_PER_FILE), (int) Math.ceil(Math.log10(totalRecords / ROWS_PER_FILE)), "0");
        }
        return indexName
                + "_export_"
                + EXPORT_DATE_FORMAT.format(exportStart)
                + (StringUtils.isNotBlank(exportFileNumber) ? "_" + exportFileNumber : "")
                + ".csv";
    }

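    A sketch of the names this produces, assuming ROWS_PER_FILE is 10,000 and EXPORT_DATE_FORMAT renders a month start as 2015-01 (both constants are defined elsewhere in this class and may differ):

        statistics_export_2015-01.csv     // month with at most ROWS_PER_FILE documents
        statistics_export_2015-01_0.csv   // first batch of a month that needs several files
        statistics_export_2015-01_1.csv   // second batch, zero-padded as needed
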
    /**
     * Returns the full URL for the specified index name.
     *
     * @param indexName the index name whose Solr URL is required. If the index name starts with
     *                  "statistics" or is "authority", the Solr base URL will be looked up
     *                  in the corresponding DSpace configuration file. Otherwise, it will fall back to a default.
     * @return the full URL to the Solr index, as a String.
     */
    private static String makeSolrUrl(String indexName)
    {
        if (indexName.startsWith("statistics"))
        {
            // TODO account for year shards properly?
            return ConfigurationManager.getProperty("solr-statistics", "server") + indexName.replaceFirst("statistics", "");
        }
        else if ("authority".equals(indexName))
        {
            return ConfigurationManager.getProperty("solr.authority.server");
        }
        return "http://localhost:8080/solr/" + indexName; // TODO better default?
    }

    /**
     * Returns a time field for the specified index name that is suitable for incremental export.
     *
     * @param indexName the index name whose time field is required.
     * @return the name of the time field, or null if no suitable field can be determined.
     */
    private static String makeTimeField(String indexName)
    {
        if (indexName.startsWith("statistics"))
        {
            return "time";
        }
        else if ("authority".equals(indexName))
        {
            return "last_modified_date";
        }
        return null; // TODO some sort of default?
    }

    /**
     * A utility method to print out all available command-line options and exit with the specified code.
     *
     * @param options  the supported options.
     * @param exitCode the exit code to use. The method will call System#exit(int) with the given code.
     */
    private static void printHelpAndExit(Options options, int exitCode)
    {
        HelpFormatter myhelp = new HelpFormatter();
        myhelp.printHelp(SolrImportExport.class.getSimpleName() + "\n", options);
        System.exit(exitCode);
    }
}
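A minimal usage sketch of the export/import pair above (the directory and core URL here are hypothetical; in DSpace these values normally come from the command-line options this class parses):

    // export the whole statistics core into monthly CSV batches...
    File dir = new File("/tmp/solr-export");              // hypothetical target directory
    String url = "http://localhost:8080/solr/statistics"; // hypothetical core URL
    SolrImportExport.exportIndex("statistics", dir, url, "time");
    // ...then load the batches back, clearing the core first and skipping _version_
    SolrImportExport.importIndex("statistics", dir, url, true, true);
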
@@ -0,0 +1,24 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.util;

/**
 * @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
 */
public class SolrImportExportException extends Exception
{
    public SolrImportExportException(String message)
    {
        super(message);
    }

    public SolrImportExportException(String message, Throwable cause)
    {
        super(message, cause);
    }
}
@@ -13,7 +13,10 @@ import org.dspace.core.Context;
import org.dspace.storage.bitstore.BitstreamStorageManager;

import java.sql.SQLException;
import java.util.List;
import java.util.Set;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;

/**
 *
@@ -46,13 +49,32 @@ public abstract class AbstractVersionProvider {
        for(Bundle nativeBundle : nativeItem.getBundles())
        {
            Bundle bundleNew = itemNew.createBundle(nativeBundle.getName());

            // DSpace knows several types of resource policies (see the class
            // org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
            // and inherited. Submission, Workflow and Inherited policies will be
            // set automatically as necessary. We need to copy the custom policies
            // only, to preserve custom-set policies and embargoes (which are
            // realized by custom policies with a start date).
            List<ResourcePolicy> bundlePolicies =
                    AuthorizeManager.findPoliciesByDSOAndType(c, nativeBundle, ResourcePolicy.TYPE_CUSTOM);
            AuthorizeManager.addPolicies(c, bundlePolicies, bundleNew);

            for(Bitstream nativeBitstream : nativeBundle.getBitstreams())
            {
                Bitstream bitstreamNew = createBitstream(c, nativeBitstream);
                bundleNew.addBitstream(bitstreamNew);

                // NOTE: bundle.addBitstream() causes Bundle policies to be inherited by default.
                // So, we need to REMOVE any inherited TYPE_CUSTOM policies before copying over the correct ones.
                AuthorizeManager.removeAllPoliciesByDSOAndType(c, bitstreamNew, ResourcePolicy.TYPE_CUSTOM);

                // Now, we need to copy the TYPE_CUSTOM resource policies from the old bitstream
                // to the new bitstream, like we did above for bundles
                List<ResourcePolicy> bitstreamPolicies =
                        AuthorizeManager.findPoliciesByDSOAndType(c, nativeBitstream, ResourcePolicy.TYPE_CUSTOM);
                AuthorizeManager.addPolicies(c, bitstreamPolicies, bitstreamNew);

                if(nativeBundle.getPrimaryBitstreamID() == nativeBitstream.getID())
                {
                    bundleNew.setPrimaryBitstreamID(bitstreamNew.getID());
@@ -17,6 +17,9 @@ import org.dspace.utils.DSpace;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;

/**
 *
@@ -84,6 +87,15 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
            } catch (IdentifierException e) {
                throw new RuntimeException("Can't create Identifier!");
            }
            // DSpace knows several types of resource policies (see the class
            // org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
            // and inherited. Submission, Workflow and Inherited policies will be
            // set automatically as necessary. We need to copy the custom policies
            // only, to preserve custom-set policies and embargoes (which are
            // realized by custom policies with a start date).
            List<ResourcePolicy> policies =
                    AuthorizeManager.findPoliciesByDSOAndType(c, previousItem, ResourcePolicy.TYPE_CUSTOM);
            AuthorizeManager.addPolicies(c, policies, itemNew);
            itemNew.update();
            return itemNew;
        } catch (SQLException e) {
@@ -210,7 +210,7 @@ public class WorkflowManager
    {
        ArrayList<WorkflowItem> mylist = new ArrayList<WorkflowItem>();

        String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? ";
        String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? ORDER BY workflow_id";

        TableRowIterator tri = DatabaseManager.queryTable(c,
                "workflowitem", myquery, e.getID());
@@ -246,7 +246,7 @@ public class WorkflowManager

        String myquery = "SELECT workflowitem.* FROM workflowitem, TaskListItem" +
                " WHERE tasklistitem.eperson_id= ? " +
                " AND tasklistitem.workflow_id=workflowitem.workflow_id";
                " AND tasklistitem.workflow_id=workflowitem.workflow_id ORDER BY workflowitem.workflow_id";

        TableRowIterator tri = DatabaseManager
                .queryTable(c, "workflowitem", myquery, e.getID());
@@ -289,6 +289,8 @@ jsp.dspace-admin.general.eperson = EPerson
jsp.dspace-admin.general.group = Group
jsp.dspace-admin.general.group-colon = Group:
jsp.dspace-admin.general.next.button = Next >
jsp.dspace-admin.general.policy-end-date-colon = End Date:
jsp.dspace-admin.general.policy-start-date-colon = Start Date:
jsp.dspace-admin.general.remove = Remove
jsp.dspace-admin.general.save = Save
jsp.dspace-admin.general.update = Update
@@ -568,7 +568,7 @@ public class DOIIdentifierProviderTest
        assumeNotNull(doiRow);

        assertTrue("Reservation of DOI did not set the correct DOI status.",
                DOIIdentifierProvider.TO_BE_RESERVERED.intValue() == doiRow.getIntColumn("status"));
                DOIIdentifierProvider.TO_BE_RESERVED.intValue() == doiRow.getIntColumn("status"));
    }

    @Test
@@ -10,16 +10,22 @@ package org.dspace.identifier;

import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.*;
import org.dspace.core.Context;
import org.dspace.identifier.ezid.DateToYear;
import org.dspace.identifier.ezid.Transform;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.ConfigurationService;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowManager;
import org.junit.*;

import static org.junit.Assert.*;

/**
@@ -29,10 +35,22 @@ import static org.junit.Assert.*;
public class EZIDIdentifierProviderTest
    extends AbstractUnitTest
{
    /** Name of the reserved EZID test authority */
    /** Name of the reserved EZID test authority. */
    private static final String TEST_SHOULDER = "10.5072/FK2";

    private static ServiceManager sm = null;
    /** A sensible metadata crosswalk. */
    private static final Map<String, String> aCrosswalk = new HashMap<>();
    static {
        aCrosswalk.put("datacite.creator", "dc.contributor.author");
        aCrosswalk.put("datacite.title", "dc.title");
        aCrosswalk.put("datacite.publisher", "dc.publisher");
        aCrosswalk.put("datacite.publicationyear", "dc.date.issued");
    }
    /** A sensible set of metadata transforms. */
    private static final Map<String, Transform> crosswalkTransforms = new HashMap();
    static {
        crosswalkTransforms.put("datacite.publicationyear", new DateToYear());
    }

    private static ConfigurationService config = null;

@@ -49,6 +67,9 @@ public class EZIDIdentifierProviderTest

    private static void dumpMetadata(Item eyetem)
    {
        if (null == eyetem)
            return;

        Metadatum[] metadata = eyetem.getMetadata("dc", Item.ANY, Item.ANY, Item.ANY);
        for (Metadatum metadatum : metadata)
            System.out.printf("Metadata: %s.%s.%s(%s) = %s\n",
@@ -89,30 +110,11 @@ public class EZIDIdentifierProviderTest
        return item;
    }

    /*
    @BeforeClass
    public static void setUpClass()
            throws Exception
    {
        Context ctx = new Context();
        ctx.turnOffAuthorisationSystem();

        ctx.setCurrentUser(eperson);

        // Create an environment for our test objects to live in.
        community = Community.create(null, ctx);
        community.setMetadata("name", "A Test Community");
        community.update();

        collection = community.createCollection();
        collection.setMetadata("name", "A Test Collection");
        collection.update();

        ctx.complete();

        // Find the usual kernel services
        sm = kernelImpl.getServiceManager();

        config = kernelImpl.getConfigurationService();

        // Configure the service under test.
@@ -129,71 +131,67 @@ public class EZIDIdentifierProviderTest
            throws Exception
    {
        System.out.print("Tearing down\n\n");
        Context ctx = new Context();
        dumpMetadata(Item.find(ctx, itemID));
    }

    @Before
    public void setUp()
            throws Exception
    {
        context.setCurrentUser(eperson);
        context.turnOffAuthorisationSystem();

        // Create an environment for our test objects to live in.
        community = Community.create(null, context);
        community.setMetadata("name", "A Test Community");
        community.update();

        collection = community.createCollection();
        collection.setMetadata("name", "A Test Collection");
        collection.update();

        context.commit();
    }

    @After
    public void tearDown()
            throws SQLException
    {
        context.restoreAuthSystemState();
    }
    */

    /** Dummy test. */
    @Test
    public void testNothing()
    {
        System.out.println("dummy");
        dumpMetadata(Item.find(context, itemID));
    }

    /**
     * Test of supports method, of class DataCiteIdentifierProvider.
     */
    /*
    @Test
    public void testSupports_Class()
    {
        System.out.println("supports Class");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        Class<? extends Identifier> identifier = DOI.class;
        boolean result = instance.supports(identifier);
        assertTrue("DOI should be supported", result);
        assertTrue("DOI is supported", result);
    }
    */

    /**
     * Test of supports method, of class DataCiteIdentifierProvider.
     */
    /*
    @Test
    public void testSupports_String()
    {
        System.out.println("supports String");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        String identifier = "doi:" + TEST_SHOULDER;
        boolean result = instance.supports(identifier);
        assertTrue(identifier + " should be supported", result);
        assertTrue(identifier + " is supported", result);
    }
    */

    /**
     * Test of register method, of class DataCiteIdentifierProvider.
     * Test of register method, of class EZIDIdentifierProvider.
     */
    /*
    @Test
@@ -202,9 +200,7 @@ public class EZIDIdentifierProviderTest
    {
        System.out.println("register Context, DSpaceObject");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        DSpaceObject dso = newItem(context);

@@ -224,9 +220,7 @@ public class EZIDIdentifierProviderTest
    {
        System.out.println("register 3");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        DSpaceObject object = newItem(context);

@@ -246,9 +240,7 @@ public class EZIDIdentifierProviderTest
    {
        System.out.println("reserve");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        DSpaceObject dso = newItem(context);
        String identifier = UUID.randomUUID().toString();
@@ -266,13 +258,11 @@ public class EZIDIdentifierProviderTest
    {
        System.out.println("mint");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        DSpaceObject dso = newItem(context);
        String result = instance.mint(context, dso);
        assertNotNull("Null returned", result);
        assertNotNull("Non-null returned", result);
    }
    */

@@ -286,9 +276,7 @@ public class EZIDIdentifierProviderTest
    {
        System.out.println("resolve");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        String identifier = UUID.randomUUID().toString();
        DSpaceObject expResult = newItem(context);
@@ -310,9 +298,7 @@ public class EZIDIdentifierProviderTest
    {
        System.out.println("lookup");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        String identifier = UUID.randomUUID().toString();
        DSpaceObject object = newItem(context);
@@ -333,9 +319,7 @@ public class EZIDIdentifierProviderTest
    {
        System.out.println("delete 2");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        DSpaceObject dso = newItem(context);

@@ -370,18 +354,16 @@ public class EZIDIdentifierProviderTest
    */

    /**
     * Test of delete method, of class DataCiteIdentifierProvider.
     * Test of delete method, of class EZIDIdentifierProvider.
     */
    /*
    @Test()
    @Test
    public void testDelete_3args()
            throws Exception
    {
        System.out.println("delete 3");

        EZIDIdentifierProvider instance
                = (EZIDIdentifierProvider)
                sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();

        DSpaceObject dso = newItem(context);
        String identifier = UUID.randomUUID().toString();
@@ -400,4 +382,45 @@ public class EZIDIdentifierProviderTest
        assertFalse("Test identifier is still present", found.hasNext());
    }
    */

    /**
     * Test of crosswalkMetadata method, of class EZIDIdentifierProvider.
     * @throws Exception
     */
    @Test
    public void testCrosswalkMetadata()
            throws Exception
    {
        System.out.println("crosswalkMetadata");

        // Set up the instance to be tested
        EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
        instance.setConfigurationService(config);
        instance.setCrosswalk(aCrosswalk);
        instance.setCrosswalkTransform(crosswalkTransforms);

        // Let's have a fresh Item to work with
        DSpaceObject dso = newItem(context);
        String handle = dso.getHandle();

        // Test!
        Map<String, String> metadata = instance.crosswalkMetadata(dso);

        // Evaluate
        String target = (String) metadata.get("_target");
        assertEquals("Generates correct _target metadatum",
                config.getProperty("dspace.url") + "/handle/" + handle,
                target);
        assertTrue("Has title", metadata.containsKey("datacite.title"));
        assertTrue("Has publication year", metadata.containsKey("datacite.publicationyear"));
        assertTrue("Has publisher", metadata.containsKey("datacite.publisher"));
        assertTrue("Has creator", metadata.containsKey("datacite.creator"));

        // Dump out the generated metadata for inspection
        System.out.println("Results:");
        for (Entry metadatum : metadata.entrySet())
        {
            System.out.printf("  %s : %s\n", metadatum.getKey(), metadatum.getValue());
        }
    }
}
@@ -13,7 +13,7 @@
    <parent>
        <groupId>org.dspace</groupId>
        <artifactId>dspace-parent</artifactId>
        <version>5.0</version>
        <version>5.5</version>
        <relativePath>..</relativePath>
    </parent>

@@ -225,6 +225,8 @@ public class DiscoverUtility
        String query = request.getParameter("query");
        if (StringUtils.isNotBlank(query))
        {
            // Escape any special characters in this user-entered query
            query = escapeQueryChars(query);
            queryArgs.setQuery(query);
        }

@@ -267,6 +269,19 @@ public class DiscoverUtility

    }

    /**
     * Escape colon-space sequence in a user-entered query, based on the
     * underlying search service. This is intended to let end users paste in a
     * title containing colon-space without requiring them to escape the colon.
     *
     * @param query user-entered query string
     * @return query with colon in colon-space sequence escaped
     */
    private static String escapeQueryChars(String query)
    {
        return StringUtils.replace(query, ": ", "\\: ");
    }

    private static void setPagination(HttpServletRequest request,
            DiscoverQuery queryArgs,
            DiscoveryConfiguration discoveryConfiguration)
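A quick sketch of the effect of this escaping (hypothetical inputs; StringUtils.replace does literal replacement, so only the exact colon-space sequence is touched):

    escapeQueryChars("Title: a subtitle"); // -> "Title\: a subtitle"
    escapeQueryChars("dc.title:test");     // -> unchanged, no colon-space sequence
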
@@ -982,11 +982,13 @@ public class ItemTag extends TagSupport

        if (tb != null)
        {
            String myPath = request.getContextPath()
                    + "/retrieve/"
                    + tb.getID()
                    + "/"
                    + UIUtil.encodeBitstreamName(tb
        if (AuthorizeManager.authorizeActionBoolean(context, tb, Constants.READ))
        {
            String myPath = request.getContextPath()
                    + "/retrieve/"
                    + tb.getID()
                    + "/"
                    + UIUtil.encodeBitstreamName(tb
                            .getName(),
                            Constants.DEFAULT_ENCODING);

@@ -995,6 +997,7 @@ public class ItemTag extends TagSupport
            out.print("<img src=\"" + myPath + "\" ");
            out.print("alt=\"" + tAltText
                    + "\" /></a><br />");
        }
    }
}

@@ -63,30 +63,7 @@ public class SelectCollectionTag extends TagSupport
    {
        HttpServletRequest hrq = (HttpServletRequest) pageContext.getRequest();
        Context context = UIUtil.obtainContext(hrq);
        Map<Community, List<Collection>> commCollList = new LinkedHashMap<Community, List<Collection>>();

        for (Community topcommunity : Community.findAllTop(context))
        {
            for (Collection collection : topcommunity.getCollections())
            {
                List<Collection> clist = null;
                if (commCollList.containsKey(topcommunity))
                {
                    clist = commCollList.get(topcommunity);
                }
                else
                {
                    clist = new ArrayList<Collection>();
                }
                clist.add(collection);
                commCollList.put(topcommunity, clist);
            }

            for (Community subcommunity : topcommunity.getSubcommunities())
            {
                addCommCollList(subcommunity, commCollList);
            }
        }
        Collection[] collections = (Collection[]) hrq.getAttribute("collections");

        sb.append("<select");
        if (name != null)
@@ -109,22 +86,16 @@ public class SelectCollectionTag extends TagSupport
        if (collection == -1) sb.append(" selected=\"selected\"");
        sb.append(">").append(firstOption).append("</option>\n");

        Iterator<Community> iter = commCollList.keySet().iterator();
        while(iter.hasNext())
        for (Collection coll : collections)
        {
            Community comm = iter.next();
            //sb.append("<optgroup label=\"").append(getCommName(comm)).append("\">\n");
            for (Collection coll : commCollList.get(comm))
            sb.append("<option value=\"").append(coll.getID()).append("\"");
            if (collection == coll.getID())
            {
                sb.append("<option value=\"").append(coll.getID()).append("\"");
                if (collection == coll.getID())
                {
                    sb.append(" selected=\"selected\"");
                }
                sb.append(">").append(CollectionDropDown.collectionPath(coll)).append("</option>\n");
                sb.append(" selected=\"selected\"");
            }
            //sb.append("</optgroup>\n");
            sb.append(">").append(CollectionDropDown.collectionPath(coll)).append("</option>\n");
        }

        sb.append("</select>\n");

        out.print(sb.toString());
@@ -141,45 +112,6 @@ public class SelectCollectionTag extends TagSupport
        return SKIP_BODY;
    }

    private void addCommCollList(Community community, Map<Community,
            List<Collection>> commCollList) throws SQLException
    {
        for (Collection collection : community.getCollections())
        {
            List<Collection> clist = null;
            if (commCollList.containsKey(community))
            {
                clist = commCollList.get(community);
            }
            else
            {
                clist = new ArrayList<Collection>();
            }
            clist.add(collection);
            commCollList.put(community, clist);
        }

        for (Community subcommunity : community.getSubcommunities())
        {
            addCommCollList(subcommunity, commCollList);
        }
    }

    private String getCommName(Community community) throws SQLException
    {
        StringBuffer sb = new StringBuffer("");
        Community[] parents = community.getAllParents();
        for (Community parent : parents)
        {
            sb.insert(0, parent.getMetadata("name")+"/");
        }
        sb.append(community.getMetadata("name"));

        return sb.toString().substring(1);
    }

    public String getKlass()
    {
        return klass;
@@ -14,6 +14,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.webui.util.UIUtil;
import org.dspace.authorize.AuthorizeException;
@@ -31,6 +32,7 @@ import org.dspace.content.Community;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.discovery.configuration.TagCloudConfiguration;

/**
@@ -88,6 +90,25 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
        String month = request.getParameter("month");
        String year = request.getParameter("year");
        String startsWith = request.getParameter("starts_with");
        // validate input to avoid cross-site scripting
        try {
            if (StringUtils.isNotBlank(month) && !"-1".equals(month)) {
                Integer.valueOf(month);
            }
            if (StringUtils.isNotBlank(year) && !"-1".equals(year)) {
                Integer.valueOf(year);
            }
            if (StringUtils.isNotBlank(startsWith)) {
                startsWith = Utils.addEntities(startsWith);
            }
        }
        catch (Exception ex) {
            log.warn("We were unable to parse the browse request: maybe a cross-site scripting attack?");
            return null;
        }

        String valueFocus = request.getParameter("vfocus");
        String valueFocusLang = request.getParameter("vfocus_lang");
        String authority = request.getParameter("authority");
@@ -110,12 +131,14 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet

        // process the input, performing some inline validation
        BrowseIndex bi = null;
        if (type != null && !"".equals(type))
        if (StringUtils.isNotEmpty(type))
        {
            bi = BrowseIndex.getBrowseIndex(type);
        }

        if (bi == null)
        // don't override a requested index; if no index is set,
        // try to find it on a possibly specified sort option.
        if (type == null && bi == null)
        {
            if (sortBy > 0)
            {
@@ -168,7 +191,7 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
        }

        // if no resultsperpage set, default to 20 - if tag cloud enabled, leave it as is!
        if (resultsperpage < 0 && !bi.isTagCloudEnabled())
        if (bi != null && resultsperpage < 0 && !bi.isTagCloudEnabled())
        {
            resultsperpage = 20;
        }

@@ -65,9 +65,16 @@ public class BrowserServlet extends AbstractBrowserServlet
        // all browse requests currently come to GET.
        BrowserScope scope = getBrowserScopeForRequest(context, request, response);

        if (scope.getBrowseIndex() == null)
        if (scope == null || scope.getBrowseIndex() == null)
        {
            throw new ServletException("There is no browse index for the request");
            String requestURL = request.getRequestURI();
            if (request.getQueryString() != null)
            {
                requestURL += "?" + request.getQueryString();
            }
            log.warn("We were unable to parse the browse request (e.g. an unconfigured index or sort option was used). Will send a 400 Bad Request. Requested URL was: " + requestURL);
            response.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }

        // Is this a request to export the metadata, or a normal browse request?

@@ -55,7 +55,7 @@ public class DisplayStatisticsServlet extends DSpaceServlet
    {

        // is the statistics data publicly viewable?
        boolean privatereport = ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin");
        boolean privatereport = ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin.usage");

        // is the user a member of the Administrator (1) group?
        boolean admin = Group.isMember(context, 1);

@@ -19,6 +19,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.GoogleMetadata;
import org.dspace.app.webui.util.Authenticate;
@@ -129,8 +130,7 @@ public class HandleServlet extends DSpaceServlet

        if (dso == null)
        {
            log.info(LogManager
                    .getHeader(context, "invalid_id", "path=" + path));
            log.info(LogManager.getHeader(context, "invalid_id", "path=" + path));
            JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);

            return;
@@ -152,8 +152,48 @@ public class HandleServlet extends DSpaceServlet
            // and firing a usage event for the DSO we're reporting for
            return;

        } else if ("/display-statistics.jsp".equals(extraPathInfo))
        {
            request.getRequestDispatcher(extraPathInfo).forward(request, response);
            // If we don't return here, we keep processing and end up
            // throwing a NPE when checking community authorization
            // and firing a usage event for the DSO we're reporting for
            return;
        } else if ("/browse".equals((extraPathInfo)) || StringUtils.startsWith(extraPathInfo, "/browse?")) {
            // Add the location if we got a community or collection
            if (dso instanceof Community)
            {
                Community c = (Community) dso;
                request.setAttribute("dspace.community", c);
            } else if (dso instanceof Collection)
            {
                Collection c = (Collection) dso;
                request.setAttribute("dspace.collection", c);
            }
            request.getRequestDispatcher(extraPathInfo).forward(request, response);
            // If we don't return here, we keep processing and end up
            // throwing a NPE when checking community authorization
            // and firing a usage event for the DSO we're reporting for
            return;
        } else if ("/simple-search".equals(extraPathInfo) || StringUtils.startsWith(extraPathInfo, "simple-search?")) {
            // Add the location if we got a community or collection
            if (dso instanceof Community)
            {
                Community c = (Community) dso;
                request.setAttribute("dspace.community", c);
            } else if (dso instanceof Collection)
            {
                Collection c = (Collection) dso;
                request.setAttribute("dspace.collection", c);
            }
            request.getRequestDispatcher(extraPathInfo).forward(request, response);
            // If we don't return here, we keep processing and end up
            // throwing a NPE when checking community authorization
            // and firing a usage event for the DSO we're reporting for
            return;
        }

        // OK, we have a valid Handle. What is it?
        if (dso.getType() == Constants.ITEM)
        {
@@ -195,9 +235,9 @@ public class HandleServlet extends DSpaceServlet
        }
        else
        {
            // Forward to another servlet
            request.getRequestDispatcher(extraPathInfo).forward(request,
                    response);
            log.debug("Found Item with extraPathInfo => Error.");
            JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
            return;
        }

    }
@@ -231,9 +271,9 @@ public class HandleServlet extends DSpaceServlet
        }
        else
        {
            // Forward to another servlet
            request.getRequestDispatcher(extraPathInfo).forward(request,
                    response);
            log.debug("Found Collection with extraPathInfo => Error.");
            JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
            return;
        }
    }
    else if (dso.getType() == Constants.COMMUNITY)
@@ -255,9 +295,9 @@ public class HandleServlet extends DSpaceServlet
        }
        else
        {
            // Forward to another servlet
            request.getRequestDispatcher(extraPathInfo).forward(request,
                    response);
            log.debug("Found Community with extraPathInfo => Error.");
            JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
            return;
        }
    }
    else

@@ -9,6 +9,7 @@ package org.dspace.app.webui.servlet.admin;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -16,6 +17,7 @@ import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.time.DateUtils;

import org.dspace.app.util.AuthorizeUtil;
import org.dspace.app.webui.servlet.DSpaceServlet;
@@ -493,6 +495,20 @@ public class AuthorizeAdminServlet extends DSpaceServlet
                .getIntParameter(request, "collection_id");
        int communityId = UIUtil.getIntParameter(request, "community_id");
        int itemId = UIUtil.getIntParameter(request, "item_id");
        Date startDate = null;
        try {
            startDate = DateUtils.parseDate(request.getParameter("policy_start_date"),
                    new String[]{"yyyy-MM-dd", "yyyy-MM", "yyyy"});
        } catch (Exception ex) {
            // Ignore: the start date simply stays null
        }
        Date endDate = null;
        try {
            endDate = DateUtils.parseDate(request.getParameter("policy_end_date"),
                    new String[]{"yyyy-MM-dd", "yyyy-MM", "yyyy"});
        } catch (Exception ex) {
            // Ignore: the end date simply stays null
        }
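        For reference, a sketch of how commons-lang's DateUtils.parseDate resolves these patterns (hypothetical inputs; the first pattern that matches wins, and anything unparseable lands in the catch blocks above, leaving the date null):

            String[] patterns = {"yyyy-MM-dd", "yyyy-MM", "yyyy"};
            DateUtils.parseDate("2016-07-15", patterns); // July 15, 2016
            DateUtils.parseDate("2016-07", patterns);    // July 1, 2016
            DateUtils.parseDate("2016", patterns);       // January 1, 2016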

        Item item = null;
        Collection collection = null;
@@ -574,6 +590,11 @@ public class AuthorizeAdminServlet extends DSpaceServlet
        // modify the policy
        policy.setAction(actionId);
        policy.setGroup(group);
        // start and end dates are used for Items and Bitstreams only.
        // Set start and end date even if they are null, to be able to
        // delete previously set dates.
        policy.setStartDate(startDate);
        policy.setEndDate(endDate);
        policy.update();

        // show edit form!

@@ -127,7 +127,11 @@
    </filter-mapping>


    <!-- kernel start listener (from impl), starts up the kernel for standalone webapps -->
    <!--
        DSpace Kernel startup listener. This listener is in charge of initializing/starting the
        DSpace Kernel. It MUST be listed BEFORE any other DSpace listeners, as DSpace services
        will not function until the Kernel is initialized.
    -->
    <listener>
        <listener-class>org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener</listener-class>
    </listener>

@@ -38,6 +38,7 @@
<%@ page import="org.dspace.content.*"%>
<%@ page import="org.dspace.core.ConfigurationManager"%>
<%@ page import="org.dspace.core.Context" %>
<%@ page import="org.dspace.core.Utils" %>
<%@ page import="org.dspace.eperson.Group" %>
<%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %>
<%@ page import="java.net.URLEncoder" %>
@@ -377,7 +378,7 @@
    {
        if (dcv.length > 0)
        {
            displayTitle = dcv[0].value;
            displayTitle = Utils.addEntities(dcv[0].value);
        }
    }
%><p class="recentItem"><a href="<%= request.getContextPath() %>/handle/<%= items[i].getHandle() %>"><%= displayTitle %></a></p><%

@@ -32,6 +32,7 @@
<%@ page import="org.dspace.browse.ItemCounter" %>
<%@ page import="org.dspace.content.*" %>
<%@ page import="org.dspace.core.ConfigurationManager" %>
<%@ page import="org.dspace.core.Utils" %>
<%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %>

<%
@@ -157,7 +158,7 @@
    {
        if (dcv.length > 0)
        {
            displayTitle = dcv[0].value;
            displayTitle = Utils.addEntities(dcv[0].value);
        }
    }
%>

@@ -14,6 +14,7 @@
    The add-on may be turned off in dspace.cfg
--%>

<%@page import="org.dspace.core.Utils"%>
<%@ page contentType="text/html;charset=UTF-8" %>
<%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %>
<%@ taglib uri="/WEB-INF/dspace-tags.tld" prefix="dspace" %>
@@ -52,7 +53,7 @@
<form name="filterVocabulary" method="post" action="<%= request.getContextPath() %>/subject-search">
    <input style="border-width:1px;border-style:solid;"
        name="filter" type="text" id="filter"
        size="15" value="<%= filter %>"
        size="15" value="<%= Utils.addEntities(filter) %>"
        title="<%= LocaleSupport.getLocalizedMessage(pageContext, "jsp.controlledvocabulary.search.trimmessage") %>"/>
    <input type="submit" name="submit" value="<%= LocaleSupport.getLocalizedMessage(pageContext, "jsp.controlledvocabulary.search.trimbutton") %>"/>
    <input type="hidden" name="action" value="filter"/>

@@ -8,6 +8,7 @@

--%>

<%@page import="org.apache.commons.lang.time.DateFormatUtils"%>
<%--
 - policy editor - for new or existing policies
 -
@@ -25,8 +26,8 @@
 - "id_name" - name/value passed in from id_name/id above
 - group_id - set if user selected a group
 - eperson_id - set if user selected an eperson
 - start_date - not set, unused
 - end_date - not set, unused
 - start_date - start date of a policy (e.g. for embargo feature)
 - end_date - end date of a policy
 - action_id - set to whatever user chose
 - (new policy) - set to the string passed in above if policy is a new one
--%>
@@ -81,10 +82,10 @@

<form action="<%= request.getContextPath() %>/tools/authorize" method="post">

    <div class="input-group">
        <span class="col-md-2">
            <%-- <td>Group:</td> --%>
            <label for="tgroup_id"><fmt:message key="jsp.dspace-admin.general.group-colon"/></label>
    <div class="input-group">
        <span class="col-md-2">
            <%-- <td>Group:</td> --%>
            <label for="tgroup_id"><fmt:message key="jsp.dspace-admin.general.group-colon"/></label>
        </span>
        <span class="col-md-10">
            <select class="form-control" size="15" name="group_id" id="tgroup_id">
@@ -101,24 +102,46 @@
            <label for="taction_id"><fmt:message key="jsp.dspace-admin.general.action-colon"/></label>
        </span>
        <span class="col-md-10">
            <input type="hidden" name="<%=id_name%>" value="<%=id%>" />
            <input type="hidden" name="policy_id" value="<%=policy.getID()%>" />
            <select class="form-control" name="action_id" id="taction_id">
            <% for( int i = 0; i < Constants.actionText.length; i++ )
            {
                // only display if action i is relevant
                // to resource type resourceRelevance
                if( (Constants.actionTypeRelevance[i]&resourceRelevance) > 0)
                { %>
                    <option value="<%= i %>"
                        <%=(policy.getAction() == i ? "selected=\"selected\"" : "")%>>
                        <%= Constants.actionText[i]%>
                    </option>
                <% }
            } %>
            </select>
        </span>
    </div>
    <input type="hidden" name="<%=id_name%>" value="<%=id%>" />
    <input type="hidden" name="policy_id" value="<%=policy.getID()%>" />
    <select class="form-control" name="action_id" id="taction_id">
    <% for( int i = 0; i < Constants.actionText.length; i++ )
    {
        // only display if action i is relevant
        // to resource type resourceRelevance
        if( (Constants.actionTypeRelevance[i]&resourceRelevance) > 0)
        { %>
            <option value="<%= i %>"
                <%=(policy.getAction() == i ? "selected=\"selected\"" : "")%>>
                <%= Constants.actionText[i]%>
            </option>
        <% }
    } %>
    </select>
    </span>
    <%
        // start and end dates are used for Items and Bitstreams only.
        if (resourceType == Constants.ITEM || resourceType == Constants.BITSTREAM)
        {
    %>
    <!-- policy start date -->
    <span class="col-md-2">
        <label for="t_start_date_id"><fmt:message key="jsp.dspace-admin.general.policy-start-date-colon"/></label>
    </span>
    <span class="col-md-10">
        <input class="form-control" name="policy_start_date" maxlength="10" size="10" type="text"
            value="<%= policy.getStartDate() != null ? DateFormatUtils.format(policy.getStartDate(), "yyyy-MM-dd") : "" %>" />
    </span>
    <!-- policy end date -->
    <span class="col-md-2">
        <label for="t_end_date_id"><fmt:message key="jsp.dspace-admin.general.policy-end-date-colon"/></label>
    </span>
    <span class="col-md-10">
        <input class="form-control" name="policy_end_date" maxlength="10" size="10" type="text"
            value="<%= policy.getEndDate() != null ? DateFormatUtils.format(policy.getEndDate(), "yyyy-MM-dd") : "" %>" />
    </span>
    <%} // if Item||Bitstream%>
    </div>
    <% if( newpolicy != null ) { %> <input name="newpolicy" type="hidden" value="<%=newpolicy%>"/> <% } %>

    <div class="btn-group pull-right col-md-2">

@@ -15,6 +15,7 @@
 - recent.submissions - RecentSubmissions
--%>

<%@page import="org.dspace.core.Utils"%>
<%@page import="org.dspace.content.Bitstream"%>
<%@ page contentType="text/html;charset=UTF-8" %>

@@ -113,13 +114,13 @@ if (submissions != null && submissions.count() > 0)
    String displayTitle = "Untitled";
    if (dcv != null && dcv.length > 0)
    {
        displayTitle = dcv[0].value;
        displayTitle = Utils.addEntities(dcv[0].value);
    }
    dcv = item.getMetadata("dc", "description", "abstract", Item.ANY);
    String displayAbstract = "";
    if (dcv != null && dcv.length > 0)
    {
        displayAbstract = dcv[0].value;
        displayAbstract = Utils.addEntities(dcv[0].value);
    }
%>
<div style="padding-bottom: 50px; min-height: 200px;" class="item <%= first?"active":""%>">

@@ -106,7 +106,7 @@
    {
        String key = "jsp.search.advanced.type." + index;
%>
    <option value="<%= index %>" <%= field1.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
    <option value="<%= StringEscapeUtils.escapeHtml(index) %>" <%= field1.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
<%
    }
%>
@@ -136,7 +136,7 @@
    {
        String key = "jsp.search.advanced.type." + index;
%>
    <option value="<%= index %>" <%= field2.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
    <option value="<%= StringEscapeUtils.escapeHtml(index) %>" <%= field2.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
<%
    }
%>
@@ -162,7 +162,7 @@
    {
        String key = "jsp.search.advanced.type." + index;
%>
    <option value="<%= index %>" <%= field3.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
    <option value="<%= StringEscapeUtils.escapeHtml(index) %>" <%= field3.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
<%
    }
%>

@@ -33,6 +33,8 @@
 - admin_button - If the user is an admin
--%>

<%@page import="org.dspace.core.Utils"%>
<%@page import="com.coverity.security.Escape"%>
<%@page import="org.dspace.discovery.configuration.DiscoverySearchFilterFacet"%>
<%@page import="org.dspace.app.webui.util.UIUtil"%>
<%@page import="java.util.HashMap"%>
@@ -55,7 +57,6 @@
    prefix="c" %>

<%@ taglib uri="http://www.dspace.org/dspace-tags.tld" prefix="dspace" %>
<%@ page import="org.apache.commons.lang.StringEscapeUtils" %>
<%@ page import="java.net.URLEncoder" %>
<%@ page import="org.dspace.content.Community" %>
<%@ page import="org.dspace.content.Collection" %>
@@ -192,14 +193,14 @@
    }
%> </select><br/>
<label for="query"><fmt:message key="jsp.search.results.searchfor"/></label>
<input type="text" size="50" id="query" name="query" value="<%= (query==null ? "" : StringEscapeUtils.escapeHtml(query)) %>"/>
<input type="text" size="50" id="query" name="query" value="<%= (query==null ? "" : Utils.addEntities(query)) %>"/>
<input type="submit" id="main-query-submit" class="btn btn-primary" value="<fmt:message key="jsp.general.go"/>" />
<% if (StringUtils.isNotBlank(spellCheckQuery)) {%>
<p class="lead"><fmt:message key="jsp.search.didyoumean"><fmt:param><a id="spellCheckQuery" data-spell="<%= StringEscapeUtils.escapeHtml(spellCheckQuery) %>" href="#"><%= spellCheckQuery %></a></fmt:param></fmt:message></p>
<p class="lead"><fmt:message key="jsp.search.didyoumean"><fmt:param><a id="spellCheckQuery" data-spell="<%= Utils.addEntities(spellCheckQuery) %>" href="#"><%= spellCheckQuery %></a></fmt:param></fmt:message></p>
<% } %>
<input type="hidden" value="<%= rpp %>" name="rpp" />
<input type="hidden" value="<%= sortedBy %>" name="sort_by" />
<input type="hidden" value="<%= order %>" name="order" />
<input type="hidden" value="<%= Utils.addEntities(sortedBy) %>" name="sort_by" />
<input type="hidden" value="<%= Utils.addEntities(order) %>" name="order" />
<% if (appliedFilters.size() > 0 ) { %>
<div class="discovery-search-appliedFilters">
    <span><fmt:message key="jsp.search.filter.applied" /></span>
@@ -213,8 +214,8 @@
<%
    for (DiscoverySearchFilter searchFilter : availableFilters)
    {
        String fkey = "jsp.search.filter."+searchFilter.getIndexFieldName();
%><option value="<%= searchFilter.getIndexFieldName() %>"<%
        String fkey = "jsp.search.filter." + Escape.uriParam(searchFilter.getIndexFieldName());
%><option value="<%= Utils.addEntities(searchFilter.getIndexFieldName()) %>"<%
        if (filter[0].equals(searchFilter.getIndexFieldName()))
        {
%> selected="selected"<%
@@ -224,8 +225,8 @@
    }
    if (!found)
    {
        String fkey = "jsp.search.filter."+filter[0];
%><option value="<%= filter[0] %>" selected="selected"><fmt:message key="<%= fkey %>"/></option><%
        String fkey = "jsp.search.filter." + Escape.uriParam(filter[0]);
%><option value="<%= Utils.addEntities(filter[0]) %>" selected="selected"><fmt:message key="<%= fkey %>"/></option><%
    }
%>
</select>
@@ -233,12 +234,12 @@
<%
    for (String opt : options)
    {
        String fkey = "jsp.search.filter.op."+opt;
%><option value="<%= opt %>"<%= opt.equals(filter[1])?" selected=\"selected\"":"" %>><fmt:message key="<%= fkey %>"/></option><%
        String fkey = "jsp.search.filter.op." + Escape.uriParam(opt);
%><option value="<%= Utils.addEntities(opt) %>"<%= opt.equals(filter[1])?" selected=\"selected\"":"" %>><fmt:message key="<%= fkey %>"/></option><%
    }
%>
</select>
<input type="text" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= StringEscapeUtils.escapeHtml(filter[2]) %>" size="45"/>
<input type="text" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= Utils.addEntities(filter[2]) %>" size="45"/>
<input class="btn btn-default" type="submit" id="submit_filter_remove_<%=idx %>" name="submit_filter_remove_<%=idx %>" value="X" />
<br/>
<%
@@ -255,17 +256,17 @@
<h5><fmt:message key="jsp.search.filter.heading" /></h5>
<p class="discovery-search-filters-hint"><fmt:message key="jsp.search.filter.hint" /></p>
<form action="simple-search" method="get">
    <input type="hidden" value="<%= StringEscapeUtils.escapeHtml(searchScope) %>" name="location" />
    <input type="hidden" value="<%= StringEscapeUtils.escapeHtml(query) %>" name="query" />
    <input type="hidden" value="<%= Utils.addEntities(searchScope) %>" name="location" />
    <input type="hidden" value="<%= Utils.addEntities(query) %>" name="query" />
    <% if (appliedFilterQueries.size() > 0 ) {
        int idx = 1;
        for (String[] filter : appliedFilters)
        {
            boolean found = false;
    %>
    <input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= filter[0] %>" />
    <input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= filter[1] %>" />
    <input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= StringEscapeUtils.escapeHtml(filter[2]) %>" />
    <input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= Utils.addEntities(filter[0]) %>" />
    <input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= Utils.addEntities(filter[1]) %>" />
    <input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= Utils.addEntities(filter[2]) %>" />
    <%
        idx++;
    }
@@ -274,8 +275,8 @@
<%
    for (DiscoverySearchFilter searchFilter : availableFilters
|
||||
{
|
||||
String fkey = "jsp.search.filter."+searchFilter.getIndexFieldName();
|
||||
%><option value="<%= searchFilter.getIndexFieldName() %>"><fmt:message key="<%= fkey %>"/></option><%
|
||||
String fkey = "jsp.search.filter." + Escape.uriParam(searchFilter.getIndexFieldName());
|
||||
%><option value="<%= Utils.addEntities(searchFilter.getIndexFieldName()) %>"><fmt:message key="<%= fkey %>"/></option><%
|
||||
}
|
||||
%>
|
||||
</select>
|
||||
@@ -283,15 +284,15 @@
|
||||
<%
|
||||
for (String opt : options)
|
||||
{
|
||||
String fkey = "jsp.search.filter.op."+opt;
|
||||
%><option value="<%= opt %>"><fmt:message key="<%= fkey %>"/></option><%
|
||||
String fkey = "jsp.search.filter.op." + Escape.uriParam(opt);
|
||||
%><option value="<%= Utils.addEntities(opt) %>"><fmt:message key="<%= fkey %>"/></option><%
|
||||
}
|
||||
%>
|
||||
</select>
|
||||
<input type="text" id="filterquery" name="filterquery" size="45" required="required" />
|
||||
<input type="hidden" value="<%= rpp %>" name="rpp" />
|
||||
<input type="hidden" value="<%= sortedBy %>" name="sort_by" />
|
||||
<input type="hidden" value="<%= order %>" name="order" />
|
||||
<input type="hidden" value="<%= Utils.addEntities(sortedBy) %>" name="sort_by" />
|
||||
<input type="hidden" value="<%= Utils.addEntities(order) %>" name="order" />
|
||||
<input class="btn btn-default" type="submit" value="<fmt:message key="jsp.search.filter.add"/>" onclick="return validateFilters()" />
|
||||
</form>
|
||||
</div>
|
||||
@@ -299,17 +300,17 @@
|
||||
<%-- Include a component for modifying sort by, order, results per page, and et-al limit --%>
|
||||
<div class="discovery-pagination-controls panel-footer">
|
||||
<form action="simple-search" method="get">
|
||||
<input type="hidden" value="<%= StringEscapeUtils.escapeHtml(searchScope) %>" name="location" />
|
||||
<input type="hidden" value="<%= StringEscapeUtils.escapeHtml(query) %>" name="query" />
|
||||
<input type="hidden" value="<%= Utils.addEntities(searchScope) %>" name="location" />
|
||||
<input type="hidden" value="<%= Utils.addEntities(query) %>" name="query" />
|
||||
<% if (appliedFilterQueries.size() > 0 ) {
|
||||
int idx = 1;
|
||||
for (String[] filter : appliedFilters)
|
||||
{
|
||||
boolean found = false;
|
||||
%>
|
||||
<input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= filter[0] %>" />
|
||||
<input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= filter[1] %>" />
|
||||
<input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= StringEscapeUtils.escapeHtml(filter[2]) %>" />
|
||||
<input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= Utils.addEntities(filter[0]) %>" />
|
||||
<input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= Utils.addEntities(filter[1]) %>" />
|
||||
<input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= Utils.addEntities(filter[2]) %>" />
|
||||
<%
|
||||
idx++;
|
||||
}
|
||||
@@ -338,8 +339,8 @@
|
||||
for (String sortBy : sortOptions)
|
||||
{
|
||||
String selected = (sortBy.equals(sortedBy) ? "selected=\"selected\"" : "");
|
||||
String mKey = "search.sort-by." + sortBy;
|
||||
%> <option value="<%= sortBy %>" <%= selected %>><fmt:message key="<%= mKey %>"/></option><%
|
||||
String mKey = "search.sort-by." + Utils.addEntities(sortBy);
|
||||
%> <option value="<%= Utils.addEntities(sortBy) %>" <%= selected %>><fmt:message key="<%= mKey %>"/></option><%
|
||||
}
|
||||
%>
|
||||
</select>
|
||||
@@ -435,7 +436,7 @@ else if( qResults != null)
|
||||
|
||||
// create the URLs accessing the previous and next search result pages
|
||||
String baseURL = request.getContextPath()
|
||||
+ (searchScope != "" ? "/handle/" + searchScope : "")
|
||||
+ (!searchScope.equals("") ? "/handle/" + searchScope : "")
|
||||
+ "/simple-search?query="
|
||||
+ URLEncoder.encode(query,"UTF-8")
|
||||
+ httpFilters
|
||||
@@ -486,7 +487,7 @@ else if( qResults != null)
|
||||
|
||||
if (pageFirst != 1)
|
||||
{
|
||||
%><li><a href="<%= firstURL %>">1</a></li><li>...</li><%
|
||||
%><li><a href="<%= firstURL %>">1</a></li><li class="disabled"><span>...</span></li><%
|
||||
}
|
||||
|
||||
for( long q = pageFirst; q <= pageLast; q++ )
|
||||
@@ -576,7 +577,7 @@ else
|
||||
|
||||
if (pageFirst != 1)
|
||||
{
|
||||
%><li><a href="<%= firstURL %>">1</a></li><li class="disabled"><span>...<span></li><%
|
||||
%><li><a href="<%= firstURL %>">1</a></li><li class="disabled"><span>...</span></li><%
|
||||
}
|
||||
|
||||
for( long q = pageFirst; q <= pageLast; q++ )
|
||||
@@ -631,28 +632,30 @@ else
|
||||
|
||||
for (DiscoverySearchFilterFacet facetConf : facetsConf)
|
||||
{
|
||||
String f = facetConf.getIndexFieldName();
|
||||
List<FacetResult> facet = qResults.getFacetResult(f);
|
||||
if (facet.size() == 0)
|
||||
{
|
||||
facet = qResults.getFacetResult(f+".year");
|
||||
if(qResults!=null) {
|
||||
String f = facetConf.getIndexFieldName();
|
||||
List<FacetResult> facet = qResults.getFacetResult(f);
|
||||
if (facet.size() == 0)
|
||||
{
|
||||
showFacets.put(f, false);
|
||||
continue;
|
||||
facet = qResults.getFacetResult(f+".year");
|
||||
if (facet.size() == 0)
|
||||
{
|
||||
showFacets.put(f, false);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
boolean showFacet = false;
|
||||
for (FacetResult fvalue : facet)
|
||||
{
|
||||
if(!appliedFilterQueries.contains(f+"::"+fvalue.getFilterType()+"::"+fvalue.getAsFilterQuery()))
|
||||
{
|
||||
showFacet = true;
|
||||
break;
|
||||
boolean showFacet = false;
|
||||
for (FacetResult fvalue : facet)
|
||||
{
|
||||
if(!appliedFilterQueries.contains(f+"::"+fvalue.getFilterType()+"::"+fvalue.getAsFilterQuery()))
|
||||
{
|
||||
showFacet = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
showFacets.put(f, showFacet);
|
||||
brefine = brefine || showFacet;
|
||||
showFacets.put(f, showFacet);
|
||||
brefine = brefine || showFacet;
|
||||
}
|
||||
}
|
||||
if (brefine) {
|
||||
%>
|
||||
@@ -688,7 +691,7 @@ else
|
||||
if (idx != limit && !appliedFilterQueries.contains(f+"::"+fvalue.getFilterType()+"::"+fvalue.getAsFilterQuery()))
|
||||
{
|
||||
%><li class="list-group-item"><span class="badge"><%= fvalue.getCount() %></span> <a href="<%= request.getContextPath()
|
||||
+ (searchScope!=""?"/handle/"+searchScope:"")
|
||||
+ (!searchScope.equals("")?"/handle/"+searchScope:"")
|
||||
+ "/simple-search?query="
|
||||
+ URLEncoder.encode(query,"UTF-8")
|
||||
+ "&sort_by=" + sortedBy
|
||||
@@ -713,7 +716,7 @@ else
|
||||
%><li class="list-group-item"><span style="visibility: hidden;">.</span>
|
||||
<% if (currFp > 0) { %>
|
||||
<a class="pull-left" href="<%= request.getContextPath()
|
||||
+ (searchScope!=""?"/handle/"+searchScope:"")
|
||||
+ (!searchScope.equals("")?"/handle/"+searchScope:"")
|
||||
+ "/simple-search?query="
|
||||
+ URLEncoder.encode(query,"UTF-8")
|
||||
+ "&sort_by=" + sortedBy
|
||||
@@ -725,7 +728,7 @@ else
|
||||
<% } %>
|
||||
<% if (idx == limit) { %>
|
||||
<a href="<%= request.getContextPath()
|
||||
+ (searchScope!=""?"/handle/"+searchScope:"")
|
||||
+ (!searchScope.equals("")?"/handle/"+searchScope:"")
|
||||
+ "/simple-search?query="
|
||||
+ URLEncoder.encode(query,"UTF-8")
|
||||
+ "&sort_by=" + sortedBy
|
||||
@@ -747,4 +750,3 @@ else
|
||||
<% } %>
|
||||
</dspace:sidebar>
|
||||
</dspace:layout>
|
||||
|
||||
|
@@ -396,7 +396,7 @@ if (pageTotal > pageCurrent)
</p>

<form id="dso-display" action="<%=request.getContextPath()%>/dso-display" method="post">
<input type="hidden" name="query" value="<%=query%>"/>
<input type="hidden" name="query" value="<%=StringEscapeUtils.escapeHtml(query)%>"/>
<input type="hidden" name="rpp" value="<%=rpp%>"/>
<input type="hidden" name="page" value="<%=pageCurrent%>"/>
<input type="hidden" name="sort_by" value="<%=(so != null ? so.getNumber() : 0)%>"/>

@@ -9,7 +9,7 @@
HTML5 Shiv v3.6.2pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed
*/
(function(l,f){function m(){var a=e.elements;return"string"==typeof a?a.split(" "):a}function i(a){var b=n[a[o]];b||(b={},h++,a[o]=h,n[h]=b);return b}function p(a,b,c){b||(b=f);if(g)return b.createElement(a);c||(c=i(b));b=c.cache[a]?c.cache[a].cloneNode():r.test(a)?(c.cache[a]=c.createElem(a)).cloneNode():c.createElem(a);return b.canHaveChildren&&!s.test(a)?c.frag.appendChild(b):b}function t(a,b){if(!b.cache)b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag();
a.createElement=function(c){return!e.shivMethods?b.createElem(c):p(c,a,b)};a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+m().join().replace(/\w+/g,function(a){b.createElem(a);b.frag.createElement(a);return'c("'+a+'")'})+");return n}")(e,b.frag)}function q(a){a||(a=f);var b=i(a);if(e.shivCSS&&!j&&!b.hasCSS){var c,d=a;c=d.createElement("p");d=d.getElementsByTagName("head")[0]||d.documentElement;c.innerHTML="x<style>article,aside,figcaption,figure,footer,header,hgroup,nav,section{display:block}mark{background:#FF0;color:#000}</style>";
a.createElement=function(c){return!e.shivMethods?b.createElem(c):p(c,a,b)};a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+m().join().replace(/\w+/g,function(a){b.createElem(a);b.frag.createElement(a);return'c("'+a+'")'})+");return n}")(e,b.frag)}function q(a){a||(a=f);var b=i(a);if(e.shivCSS&&!j&&!b.hasCSS){var c,d=a;c=d.createElement("p");d=d.getElementsByTagName("head")[0]||d.documentElement;c.innerHTML="x<style>article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}</style>";
c=d.insertBefore(c.lastChild,d.firstChild);b.hasCSS=!!c}g||t(a,b);return a}var k=l.html5||{},s=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,r=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,j,o="_html5shiv",h=0,n={},g;(function(){try{var a=f.createElement("a");a.innerHTML="<xyz></xyz>";j="hidden"in a;var b;if(!(b=1==a.childNodes.length)){f.createElement("a");var c=f.createDocumentFragment();b="undefined"==typeof c.cloneNode||
"undefined"==typeof c.createDocumentFragment||"undefined"==typeof c.createElement}g=b}catch(d){g=j=!0}})();var e={elements:k.elements||"abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video",version:"3.6.2pre",shivCSS:!1!==k.shivCSS,supportsUnknownElements:g,shivMethods:!1!==k.shivMethods,type:"default",shivDocument:q,createElement:p,createDocumentFragment:function(a,b){a||(a=f);if(g)return a.createDocumentFragment();
"undefined"==typeof c.createDocumentFragment||"undefined"==typeof c.createElement}g=b}catch(d){g=j=!0}})();var e={elements:k.elements||"abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup main mark meter nav output progress section summary time video",version:"3.6.2pre",shivCSS:!1!==k.shivCSS,supportsUnknownElements:g,shivMethods:!1!==k.shivMethods,type:"default",shivDocument:q,createElement:p,createDocumentFragment:function(a,b){a||(a=f);if(g)return a.createDocumentFragment();
for(var b=b||i(a),c=b.frag.cloneNode(),d=0,e=m(),h=e.length;d<h;d++)c.createElement(e[d]);return c}};l.html5=e;q(f)})(this,document);

@@ -421,7 +421,16 @@
testChunks: true,
throttleProgressCallbacks:1,
method: "multipart",
query:{workspace_item_id:'<%= subInfo.getSubmissionItem().getID()%>'}
<%
if (subInfo.isInWorkflow())
{
%>
query:{workflow_id:'<%= subInfo.getSubmissionItem().getID()%>'}
<%
} else {
%>
query:{workspace_item_id:'<%= subInfo.getSubmissionItem().getID()%>'}
<%}%>
});
// Resumable.js isn't supported, fall back on a different method

@@ -11,7 +11,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>5.0</version>
<version>5.5</version>
<relativePath>../..</relativePath>
</parent>

@@ -11,7 +11,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>5.0</version>
<version>5.5</version>
<relativePath>..</relativePath>
</parent>

@@ -32,7 +32,11 @@


<!-- kernel start listener (from impl), starts up the kernel for standalong webapps -->
<!--
DSpace Kernel startup listener. This listener is in charge of initializing/starting the
DSpace Kernel. It MUST be listed BEFORE any other DSpace listeners, as DSpace services
will not function until the Kernel is initialized.
-->
<listener>
<listener-class>org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener</listener-class>
</listener>

@@ -8,7 +8,7 @@
<parent>
<artifactId>dspace-parent</artifactId>
<groupId>org.dspace</groupId>
<version>5.0</version>
<version>5.5</version>
<relativePath>..</relativePath>
</parent>

@@ -16,7 +16,7 @@
<!-- This is the path to the root [dspace-src] directory. -->
<root.basedir>${basedir}/..</root.basedir>
<spring.version>3.2.5.RELEASE</spring.version>
<xoai.version>3.2.9</xoai.version>
<xoai.version>3.2.10</xoai.version>
<jtwig.version>2.0.1</jtwig.version>
</properties>

@@ -84,7 +84,7 @@
<dependencies>
<!-- OAI Data Provider Framework -->
<dependency>
<groupId>com.lyncode</groupId>
<groupId>org.dspace</groupId>
<artifactId>xoai</artifactId>
<version>${xoai.version}</version>
</dependency>

@@ -128,7 +128,7 @@ public class XOAI {
if (clean) {
clearIndex();
System.out.println("Using full import.");
this.indexAll();
result = this.indexAll();
} else {
SolrQuery solrParams = new SolrQuery("*:*")
.addField("item.lastmodified")
@@ -167,10 +167,11 @@ public class XOAI {
System.out
.println("Incremental import. Searching for documents modified after: "
+ last.toString());

String sqlQuery = "SELECT item_id FROM item WHERE in_archive=TRUE AND discoverable=TRUE AND last_modified > ?";
// Index both in_archive items AND withdrawn items. Withdrawn items will be flagged withdrawn
// (in order to notify external OAI harvesters of their new status)
String sqlQuery = "SELECT item_id FROM item WHERE (in_archive=TRUE OR withdrawn=TRUE) AND discoverable=TRUE AND last_modified > ?";
if(DatabaseManager.isOracle()){
sqlQuery = "SELECT item_id FROM item WHERE in_archive=1 AND discoverable=1 AND last_modified > ?";
sqlQuery = "SELECT item_id FROM item WHERE (in_archive=1 OR withdrawn=1) AND discoverable=1 AND last_modified > ?";
}

try {
@@ -187,10 +188,11 @@ public class XOAI {
private int indexAll() throws DSpaceSolrIndexerException {
System.out.println("Full import");
try {

String sqlQuery = "SELECT item_id FROM item WHERE in_archive=TRUE AND discoverable=TRUE";
// Index both in_archive items AND withdrawn items. Withdrawn items will be flagged withdrawn
// (in order to notify external OAI harvesters of their new status)
String sqlQuery = "SELECT item_id FROM item WHERE (in_archive=TRUE OR withdrawn=TRUE) AND discoverable=TRUE";
if(DatabaseManager.isOracle()){
sqlQuery = "SELECT item_id FROM item WHERE in_archive=1 AND discoverable=1";
sqlQuery = "SELECT item_id FROM item WHERE (in_archive=1 OR withdrawn=1) AND discoverable=1";
}

TableRowIterator iterator = DatabaseManager.query(context,
@@ -244,7 +246,9 @@ public class XOAI {
String handle = item.getHandle();
doc.addField("item.handle", handle);
doc.addField("item.lastmodified", item.getLastModified());
doc.addField("item.submitter", item.getSubmitter().getEmail());
if (item.getSubmitter() != null) {
doc.addField("item.submitter", item.getSubmitter().getEmail());
}
doc.addField("item.deleted", item.isWithdrawn() ? "true" : "false");
for (Collection col : item.getCollections())
doc.addField("item.collections",
@@ -287,17 +291,14 @@ public class XOAI {
}

private boolean isPublic(Item item) {
boolean pub = false;
try {
AuthorizeManager.authorizeAction(context, item, Constants.READ);
for (Bundle b : item.getBundles())
AuthorizeManager.authorizeAction(context, b, Constants.READ);
return true;
} catch (AuthorizeException ex) {
log.debug(ex.getMessage());
//Check if READ access allowed on this Item
pub = AuthorizeManager.authorizeActionBoolean(context, item, Constants.READ);
} catch (SQLException ex) {
log.error(ex.getMessage());
}
return false;
return pub;
}


@@ -355,6 +356,8 @@ public class XOAI {
XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class);
XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class);

Context ctx = null;

try {
CommandLineParser parser = new PosixParser();
Options options = new Options();
@@ -394,7 +397,7 @@ public class XOAI {
String command = line.getArgs()[0];

if (COMMAND_IMPORT.equals(command)) {
Context ctx = new Context();
ctx = new Context();
XOAI indexer = new XOAI(ctx,
line.hasOption('o'),
line.hasOption('c'),
@@ -404,21 +407,17 @@ public class XOAI {

int imported = indexer.index();
if (imported > 0) cleanCache(itemCacheService, cacheService);

ctx.abort();
} else if (COMMAND_CLEAN_CACHE.equals(command)) {
cleanCache(itemCacheService, cacheService);
} else if (COMMAND_COMPILE_ITEMS.equals(command)) {

Context ctx = new Context();
ctx = new Context();
XOAI indexer = new XOAI(ctx, line.hasOption('v'));
applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer);

indexer.compile();

cleanCache(itemCacheService, cacheService);

ctx.abort();
} else if (COMMAND_ERASE_COMPILED_ITEMS.equals(command)) {
cleanCompiledItems(itemCacheService);
cleanCache(itemCacheService, cacheService);
@@ -436,6 +435,12 @@ public class XOAI {
}
log.error(ex.getMessage(), ex);
}
finally
{
// Abort our context, if still open
if(ctx!=null && ctx.isValid())
ctx.abort();
}
}

private static void cleanCompiledItems(XOAIItemCacheService itemCacheService) throws IOException {

@@ -145,7 +145,7 @@ public class DSpaceOAIDataProvider
}

private void closeContext(Context context) {
if (context != null)
if (context != null && context.isValid())
context.abort();
}

@@ -8,12 +8,10 @@

package org.dspace.xoai.filter;

import com.google.common.base.Function;
import com.lyncode.builder.ListBuilder;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
@@ -25,12 +23,12 @@ import org.dspace.xoai.exceptions.InvalidMetadataFieldException;
import org.dspace.xoai.filter.data.DSpaceMetadataFilterOperator;
import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult;
import org.dspace.xoai.services.api.database.FieldResolver;
import org.springframework.beans.factory.annotation.Autowired;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import com.google.common.base.Function;
import com.lyncode.builder.ListBuilder;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType;

/**
* @author Lyncode Development Team <dspace@lyncode.com>
@@ -41,14 +39,6 @@ public class DSpaceAtLeastOneMetadataFilter extends DSpaceFilter {
private String field;
private DSpaceMetadataFilterOperator operator = DSpaceMetadataFilterOperator.UNDEF;
private List<String> values;
private ParameterMap configuration;

public DSpaceAtLeastOneMetadataFilter(ParameterMap configuration) {
this.configuration = configuration;
}

@Autowired
FieldResolver fieldResolver;

private String getField() {
if (field == null) {
@@ -249,7 +239,4 @@ public class DSpaceAtLeastOneMetadataFilter extends DSpaceFilter {
}
}

public ParameterMap getConfiguration() {
return configuration;
}
}

@@ -8,11 +8,13 @@

package org.dspace.xoai.filter;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
@@ -21,10 +23,6 @@ import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
*
* @author Lyncode Development Team <dspace@lyncode.com>
@@ -32,11 +30,6 @@ import java.util.List;
public class DSpaceAuthorizationFilter extends DSpaceFilter
{
private static Logger log = LogManager.getLogger(DSpaceAuthorizationFilter.class);
private Context context;

public DSpaceAuthorizationFilter (Context context) {
this.context = context;
}

@Override
public DatabaseFilterResult buildDatabaseQuery(Context context)
@@ -54,29 +47,25 @@ public class DSpaceAuthorizationFilter extends DSpaceFilter
@Override
public boolean isShown(DSpaceItem item)
{
boolean pub = false;
try
{
// If Handle or Item are not found, return false
String handle = DSpaceItem.parseHandle(item.getIdentifier());
if (handle == null) return false;
if (handle == null)
return false;
Item dspaceItem = (Item) HandleManager.resolveToObject(context, handle);
AuthorizeManager.authorizeAction(context, dspaceItem, Constants.READ);
for (Bundle b : dspaceItem.getBundles())
AuthorizeManager.authorizeAction(context, b, Constants.READ);
return true;
}
catch (AuthorizeException ex)
{
log.error(ex.getMessage(), ex);
if (dspaceItem == null)
return false;

// Check if READ access allowed on Item
pub = AuthorizeManager.authorizeActionBoolean(context, dspaceItem, Constants.READ);
}
catch (SQLException ex)
{
log.error(ex.getMessage(), ex);
}
catch (Exception ex)
{
log.error(ex.getMessage(), ex);
}
return false;
return pub;
}

@Override

@@ -9,10 +9,13 @@ package org.dspace.xoai.filter;

import com.lyncode.xoai.dataprovider.data.Filter;
import com.lyncode.xoai.dataprovider.data.ItemIdentifier;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;

import org.dspace.core.Context;
import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult;
import org.dspace.xoai.services.api.database.FieldResolver;

/**
*
@@ -20,6 +23,15 @@ import org.dspace.xoai.filter.results.SolrFilterResult;
*/
public abstract class DSpaceFilter implements Filter
{
/** The configuration from xoai.xml file */
protected ParameterMap configuration;

/** The configuration from xoai.xml file */
protected FieldResolver fieldResolver;

/** The oai context */
protected Context context;

public abstract DatabaseFilterResult buildDatabaseQuery(Context context);
public abstract SolrFilterResult buildSolrQuery();
public abstract boolean isShown(DSpaceItem item);
@@ -33,4 +45,55 @@ public abstract class DSpaceFilter implements Filter
}
return false;
}

/**
* @return the configuration map if defined in xoai.xml, otherwise null.
*/
public ParameterMap getConfiguration()
{
return configuration;
}

/**
* @param configuration
*            the configuration map to set
*/
public void setConfiguration(ParameterMap configuration)
{
this.configuration = configuration;
}

/**
* @return the fieldResolver
*/
public FieldResolver getFieldResolver()
{
return fieldResolver;
}

/**
* @param fieldResolver
*            the fieldResolver to set
*/
public void setFieldResolver(FieldResolver fieldResolver)
{
this.fieldResolver = fieldResolver;
}

/**
* @return the context
*/
public Context getContext()
{
return context;
}

/**
* @param context
*            the context to set
*/
public void setContext(Context context)
{
this.context = context;
}
}

@@ -7,9 +7,10 @@
*/
package org.dspace.xoai.filter;

import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.dspace.core.Constants;
@@ -18,11 +19,9 @@ import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.exceptions.InvalidMetadataFieldException;
import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult;
import org.dspace.xoai.services.api.database.FieldResolver;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType;

/**
* This filter allows one to retrieve (from the data source) those items
@@ -38,14 +37,7 @@ public class DSpaceMetadataExistsFilter extends DSpaceFilter {
private static Logger log = LogManager
.getLogger(DSpaceMetadataExistsFilter.class);

private FieldResolver fieldResolver;
private List<String> fields;
private ParameterMap configuration;

public DSpaceMetadataExistsFilter(FieldResolver fieldResolver, ParameterMap configuration) {
this.fieldResolver = fieldResolver;
this.configuration = configuration;
}

private List<String> getFields() {
if (this.fields == null) {
@@ -114,7 +106,4 @@ public class DSpaceMetadataExistsFilter extends DSpaceFilter {
return new SolrFilterResult(cond.toString());
}

public ParameterMap getConfiguration() {
return configuration;
}
}

@@ -48,10 +48,12 @@ public class DSpaceSetSpecFilter
{
try
{
DSpaceObject dso = handleResolver.resolve(setSpec.replace("col_", ""));
return new DatabaseFilterResult(
"EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id = ?)",
DSpaceObject dso = handleResolver.resolve(setSpec.replace("col_", "").replace("_", "/"));
if(dso != null){
return new DatabaseFilterResult(
"EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id = ?)",
dso.getID());
}
}
catch (Exception ex)
{
@@ -62,12 +64,14 @@ public class DSpaceSetSpecFilter
{
try
{
DSpaceObject dso = handleResolver.resolve(setSpec.replace("com_", ""));
List<Integer> list = collectionsService.getAllSubCollections(dso.getID());
String subCollections = StringUtils.join(list.iterator(), ",");
return new DatabaseFilterResult(
"EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id IN ("
DSpaceObject dso = handleResolver.resolve(setSpec.replace("com_", "").replace("_", "/"));
if(dso != null){
List<Integer> list = collectionsService.getAllSubCollections(dso.getID());
String subCollections = StringUtils.join(list.iterator(), ",");
return new DatabaseFilterResult(
"EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id IN ("
+ subCollections + "))");
}
}
catch (Exception e)
{

@@ -0,0 +1,64 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.xoai.filter;

import java.util.ArrayList;
import java.util.List;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult;

/**
* Filter for Withdrawn items. Enabling this filter allows tombstones for
* withdrawn items to be accessible via OAI-PMH. This allows us to properly
* flag withdrawn items with a "deleted" status. For more info on OAI-PMH
* "deleted" status, see:
* http://www.openarchives.org/OAI/openarchivesprotocol.html#deletion
* <P>
* (Don't worry, a tombstone doesn't display the withdrawn item's metadata or files.)
*
* @author Tim Donohue
*/
public class DSpaceWithdrawnFilter extends DSpaceFilter {

@Override
public DatabaseFilterResult buildDatabaseQuery(Context context)
{
List<Object> params = new ArrayList<Object>();

String filter = "i.withdrawn=TRUE";
if(DatabaseManager.isOracle())
filter = "i.withdrawn=1";

return new DatabaseFilterResult(filter, params);
}

@Override
public boolean isShown(DSpaceItem item)
{
// For DSpace, if an Item is withdrawn, "isDeleted()" will be true.
// In this scenario, we want a withdrawn item to be *shown* so that
// we can properly respond with a "deleted" status via OAI-PMH.
// Don't worry, this does NOT make the metadata public for withdrawn items,
// it merely provides an item "tombstone" via OAI-PMH.
if (item.isDeleted())
return true;
else
return false;
}

@Override
public SolrFilterResult buildSolrQuery()
{
// In Solr, we store withdrawn items as "deleted".
// See org.dspace.xoai.app.XOAI, index(Item) method.
return new SolrFilterResult("item.deleted:true");
}
}

@@ -49,7 +49,12 @@ public class DateUntilFilter
@Override
public SolrFilterResult buildSolrQuery()
{
String format = dateProvider.format(date).replace("Z", ".999Z"); // Tweak to set the millisecon
String format = dateProvider.format(date).replace("Z", ".999Z"); // Tweak to set the milliseconds
// if date has timestamp of 00:00:00, switch it to refer to end of day
if (format.substring(11, 19).equals("00:00:00"))
{
format = format.substring(0, 11) + "23:59:59" + format.substring(19);
}
return new SolrFilterResult("item.lastmodified:[* TO "
+ ClientUtils.escapeQueryChars(format) + "]");
}

@@ -28,7 +28,7 @@ public class NotFilter {

@Override
public SolrFilterResult buildSolrQuery() {
return new SolrFilterResult("NOT("+inFilter.buildSolrQuery()+")");
return new SolrFilterResult("*:* AND NOT(" + inFilter.buildSolrQuery().getQuery() + ")");
}

@Override

@@ -36,7 +36,7 @@ public class OrFilter {

@Override
public SolrFilterResult buildSolrQuery() {
return new SolrFilterResult("("+left.buildSolrQuery()+") OR ("+right.buildSolrQuery()+")");
return new SolrFilterResult("("+left.buildSolrQuery().getQuery()+") OR ("+right.buildSolrQuery().getQuery()+")");
}

@Override

@@ -52,12 +52,18 @@ public class DSpaceDatabaseQueryResolver implements DatabaseQueryResolver {
}
countParameters.addAll(parameters);

String whereInArchive = "WHERE i.in_archive=true";
if(DatabaseManager.isOracle())
{
whereInArchive = "WHERE i.in_archive=1";
}

if (!where.equals("")) {
query += " WHERE i.in_archive=true AND " + where;
countQuery += " WHERE i.in_archive=true AND " + where;
query += " " + whereInArchive + " AND " + where;
countQuery += " " + whereInArchive + " AND " + where;
} else {
query += " WHERE i.in_archive=true";
countQuery += " WHERE i.in_archive=true";
query += " " + whereInArchive;
countQuery += " " + whereInArchive;
}

query += " ORDER BY i.item_id";

@@ -7,29 +7,46 @@
*/
package org.dspace.xoai.services.impl.resources;

import com.lyncode.xoai.dataprovider.services.api.ResourceResolver;
import org.dspace.core.ConfigurationManager;

import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class DSpaceResourceResolver implements ResourceResolver {
private static final TransformerFactory transformerFactory = TransformerFactory.newInstance();
private final String basePath = ConfigurationManager.getProperty("oai", "config.dir");
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;

import org.dspace.core.ConfigurationManager;

import com.lyncode.xoai.dataprovider.services.api.ResourceResolver;

public class DSpaceResourceResolver implements ResourceResolver
{
private static final TransformerFactory transformerFactory = TransformerFactory
.newInstance();

private final String basePath = ConfigurationManager.getProperty("oai",
"config.dir");

@Override
public InputStream getResource(String path) throws IOException {
public InputStream getResource(String path) throws IOException
{
return new FileInputStream(new File(basePath, path));
}

@Override
public Transformer getTransformer(String path) throws IOException, TransformerConfigurationException {
return transformerFactory.newTransformer(new StreamSource(getResource(path)));
public Transformer getTransformer(String path) throws IOException,
TransformerConfigurationException
{
// construct a Source that reads from an InputStream
Source mySrc = new StreamSource(getResource(path));
// specify a system ID (the path to the XSLT-file on the filesystem)
// so the Source can resolve relative URLs that are encountered in
// XSLT-files (like <xsl:import href="utils.xsl"/>)
String systemId = basePath + "/" + path;
mySrc.setSystemId(systemId);
return transformerFactory.newTransformer(mySrc);
}
}

@@ -79,20 +79,25 @@ public class BaseDSpaceFilterResolver implements DSpaceFilterResolver {

@Override
public Filter getFilter(Class<? extends Filter> filterClass, ParameterMap configuration) {
if (filterClass.isAssignableFrom(DSpaceAtLeastOneMetadataFilter.class)) {
return new DSpaceAtLeastOneMetadataFilter(configuration);
} else if (filterClass.isAssignableFrom(DSpaceAuthorizationFilter.class)) {
try {
return new DSpaceAuthorizationFilter(contextService.getContext());
} catch (ContextServiceException e) {
LOGGER.error(e.getMessage(), e);
return null;
Filter result = null;
try
{
result = filterClass.newInstance();
if (result instanceof DSpaceFilter)
{
// add the DSpace filter specific objects
((DSpaceFilter) result).setConfiguration(configuration);
((DSpaceFilter) result).setContext(contextService.getContext());
((DSpaceFilter) result).setFieldResolver(fieldResolver);
}
} else if (filterClass.isAssignableFrom(DSpaceMetadataExistsFilter.class)) {
return new DSpaceMetadataExistsFilter(fieldResolver, configuration);
}
LOGGER.error("Filter "+filterClass.getName()+" unknown instantiation");
return null;
catch (InstantiationException | IllegalAccessException
| ContextServiceException e)
{
LOGGER.error("Filter " + filterClass.getName()
+ " could not be instantiated", e);
}
return result;
}

@Override

@@ -74,8 +74,12 @@ public class DSpaceRepositoryConfiguration implements RepositoryConfiguration
HttpServletRequest request = ((ServletRequestAttributes) RequestContextHolder.currentRequestAttributes()).getRequest();
if (baseUrl == null)
{
baseUrl = request.getRequestURL().toString()
.replace(request.getPathInfo(), "");
baseUrl = configurationService.getProperty("oai", "dspace.oai.url");
if (baseUrl == null) {
log.warn("{ OAI 2.0 :: DSpace } Not able to retrieve the dspace.oai.url property from oai.cfg. Falling back to request address");
baseUrl = request.getRequestURL().toString()
.replace(request.getPathInfo(), "");
}
}
return baseUrl + request.getPathInfo();
}
@@ -83,7 +87,7 @@ public class DSpaceRepositoryConfiguration implements RepositoryConfiguration
@Override
public DeleteMethod getDeleteMethod()
{
return DeleteMethod.PERSISTENT;
return DeleteMethod.TRANSIENT;
}

@Override

@@ -17,7 +17,7 @@ import java.util.Locale;
import java.util.TimeZone;

/**
*
*
* @author Lyncode Development Team <dspace@lyncode.com>
*/
public class DateUtils
@@ -25,35 +25,49 @@ public class DateUtils

private static Logger log = LogManager.getLogger(DateUtils.class);


/**
* Format a Date object as a valid UTC Date String, per OAI-PMH guidelines
* http://www.openarchives.org/OAI/openarchivesprotocol.html#DatestampsResponses
*
* @param date Date object
* @return UTC date string
*/
public static String format(Date date)
{
return format(date, true);
}
public static String format(Date date, boolean init)
{
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.'000Z'");
if (!init) sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.'999Z'");
// NOTE: OAI-PMH REQUIRES that all dates be expressed in UTC format
// as YYYY-MM-DDThh:mm:ssZ For more details, see
// http://www.openarchives.org/OAI/openarchivesprotocol.html#DatestampsResponses
SimpleDateFormat sdf = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'");
// We indicate that the returned date is in Zulu time (UTC) so we have
// to set the time zone of sdf correct.
// to set the time zone of sdf correctly
sdf.setTimeZone(TimeZone.getTimeZone("ZULU"));
String ret = sdf.format(date);
return ret;
}

/**
* Parse a string into a Date object
* @param date string to parse
* @return Date
*/
public static Date parse(String date)
{
// 2008-01-01T00:00:00Z
// First try to parse as a full UTC date/time, e.g. 2008-01-01T00:00:00Z
SimpleDateFormat format = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.getDefault());
// format.setTimeZone(TimeZone.getTimeZone("ZULU"));
"yyyy-MM-dd'T'HH:mm:ss'Z'");
format.setTimeZone(TimeZone.getTimeZone("ZULU"));
Date ret;
try
{
ret = format.parse(date);
return ret;
}
catch (ParseException e)
catch (ParseException ex)
{
// If a parse exception, try other logical date/time formats
// based on the local timezone
format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss",
Locale.getDefault());
try
@@ -62,7 +76,8 @@ public class DateUtils
}
catch (ParseException e1)
{
format = new SimpleDateFormat("yyyy-MM-dd", Locale.getDefault());
format = new SimpleDateFormat("yyyy-MM-dd",
Locale.getDefault());
try
{
return format.parse(date);
@@ -85,7 +100,7 @@ public class DateUtils
}
catch (ParseException e4)
{
log.error(e4.getMessage(), e);
log.error(e4.getMessage(), e4);
}
}
}
@@ -96,7 +111,9 @@ public class DateUtils

public static Date parseFromSolrDate(String date)
{
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.getDefault());
SimpleDateFormat format = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'");
format.setTimeZone(TimeZone.getTimeZone("ZULU"));
Date ret;
try
{

@@ -9,9 +9,9 @@ package org.dspace.xoai.util;

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;

import org.dspace.app.util.Util;
import org.dspace.core.Constants;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

/**
*
@@ -24,7 +24,8 @@ public class URLUtils
public static String encode (String value) {
try
{
return URLEncoder.encode(value, "UTF-8");
return Util.encodeBitstreamName(value, Constants.DEFAULT_ENCODING);

}
catch (UnsupportedEncodingException e)
{

@@ -28,6 +28,15 @@
<param-value>${dspace.dir}</param-value>
</context-param>

<!--
DSpace Kernel startup listener. This listener is in charge of initializing/starting the
DSpace Kernel. It MUST be listed BEFORE any other DSpace listeners, as DSpace services
will not function until the Kernel is initialized.
-->
<listener>
<listener-class>org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener</listener-class>
</listener>

<listener>
<listener-class>org.dspace.app.util.DSpaceContextListener</listener-class>
</listener>

@@ -13,7 +13,8 @@
xmlns:lyn="http://www.lyncode.com/fakeNamespace" xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
xmlns:dc="http://purl.org/dc/doc:elements/1.1/"
xmlns:verb="http://informatik.hu-berlin.de/xmlverbatim"
exclude-result-prefixes="oai lyn oai_dc dc verb">
xmlns:oai_id="http://www.openarchives.org/OAI/2.0/oai-identifier"
exclude-result-prefixes="oai lyn oai_dc dc verb oai_id">

<xsl:output method="html" doctype-public="-//W3C//DTD HTML 4.01//EN" doctype-system="http://www.w3.org/TR/html4/strict.dtd" />

@@ -167,14 +168,14 @@
<tr>
<td><b>Repository identifier</b></td>
<td>
<xsl:value-of select="oai:description/oai:oai-identifier/oai:repositoryIdentifier/text()" />
<xsl:value-of select="oai:description/oai_id:oai-identifier/oai_id:repositoryIdentifier/text()" />
</td>
</tr>
<tr>
<td><b>Sample identifier</b></td>
<td>
<xsl:value-of
select="oai:description/oai:oai-identifier/oai:sampleIdentifier/text()" />
select="oai:description/oai_id:oai-identifier/oai_id:sampleIdentifier/text()" />
</td>
</tr>
<tr>
@@ -284,11 +285,15 @@
<h5>Identifier <small><xsl:value-of select="oai:header/oai:identifier/text()"></xsl:value-of></small></h5>
</div>
<div class="col-lg-6">
<h5>Last Modfied <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
<h5>Last Modified <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
</div>
</div>
</div>
<div class="panel-body">
<!-- If this record has a "status", display it as a warning -->
<xsl:if test="oai:header/@status">
<div class="alert alert-warning">Record Status: <xsl:value-of select="oai:header/@status"/></div>
</xsl:if>
<div class="panel panel-success">
<a data-toggle="collapse">
<xsl:attribute name="href">#sets<xsl:value-of select="translate(oai:header/oai:identifier/text(), ':/.', '')"></xsl:value-of></xsl:attribute>
@@ -349,11 +354,15 @@
<h5>Identifier <small><xsl:value-of select="oai:header/oai:identifier/text()"></xsl:value-of></small></h5>
</div>
<div class="col-lg-6">
<h5>Last Modfied <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
<h5>Last Modified <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
</div>
</div>
</div>
<div class="panel-body">
<!-- If this record has a "status", display it as a warning -->
<xsl:if test="oai:header/@status">
<div class="alert alert-warning">Record Status: <xsl:value-of select="oai:header/@status"/></div>
</xsl:if>
<div class="panel panel-success">
<div class="panel-heading">
<h5 class="panel-title">
@@ -409,7 +418,7 @@
<h5>Identifier <small><xsl:value-of select="oai:identifier/text()"></xsl:value-of></small></h5>
</div>
<div class="col-lg-4">
<h5>Last Modfied <small><xsl:value-of select="translate(oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
<h5>Last Modified <small><xsl:value-of select="translate(oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
</div>
<div class="col-lg-4">
<a class="btn btn-default pull-right">
@@ -422,6 +431,10 @@
</div>
</div>
<div class="panel-body">
<!-- If this record has a "status", display it as a warning -->
<xsl:if test="@status">
<div class="alert alert-warning">Record Status: <xsl:value-of select="@status"/></div>
</xsl:if>
<div class="panel panel-success">
<a data-toggle="collapse">
<xsl:attribute name="href">#sets<xsl:value-of select="translate(oai:identifier/text(), ':/.', '')"></xsl:value-of></xsl:attribute>
@@ -500,7 +513,7 @@
<div class="text-center">
<a class="btn btn-primary">
<xsl:attribute name="href">
<xsl:value-of select="concat(/oai:OAI-PMH/oai:request/text(), '?verb=ListSets&resumptionToken=', text())"></xsl:value-of>
<xsl:value-of select="concat(/oai:OAI-PMH/oai:request/text(), '?verb=',/oai:OAI-PMH/oai:request/@verb,'&resumptionToken=', text())"></xsl:value-of>
</xsl:attribute>
Show More
</a>

@@ -7,16 +7,13 @@
*/
package org.dspace.xoai.tests.unit.services.impl.database;

import com.lyncode.builder.DateBuilder;
import com.lyncode.xoai.dataprovider.data.Filter;
import com.lyncode.xoai.dataprovider.filter.Scope;
import com.lyncode.xoai.dataprovider.filter.ScopedFilter;
import com.lyncode.xoai.dataprovider.filter.conditions.AndCondition;
import com.lyncode.xoai.dataprovider.filter.conditions.Condition;
import com.lyncode.xoai.dataprovider.filter.conditions.CustomCondition;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.StringValue;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.dspace.core.Constants;
import org.dspace.xoai.filter.DSpaceMetadataExistsFilter;
import org.dspace.xoai.filter.DSpaceSetSpecFilter;
@@ -29,12 +26,15 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import com.lyncode.builder.DateBuilder;
import com.lyncode.xoai.dataprovider.data.Filter;
import com.lyncode.xoai.dataprovider.filter.Scope;
import com.lyncode.xoai.dataprovider.filter.ScopedFilter;
import com.lyncode.xoai.dataprovider.filter.conditions.AndCondition;
import com.lyncode.xoai.dataprovider.filter.conditions.Condition;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.StringValue;

public class DSpaceDatabaseQueryResolverTest extends AbstractQueryResolverTest {
private static final Date DATE = new Date();
@@ -110,10 +110,17 @@ public class DSpaceDatabaseQueryResolverTest extends AbstractQueryResolverTest {
.withValue(FIELD_1)
.withName("fields"));

scopedFilters.add(new ScopedFilter(new CustomCondition(getFilterResolver(),
DSpaceMetadataExistsFilter.class,
filterConfiguration),
Scope.Query));
final DSpaceMetadataExistsFilter metadataExistsFilter = new DSpaceMetadataExistsFilter();
metadataExistsFilter.setConfiguration(filterConfiguration);
metadataExistsFilter.setFieldResolver(theFieldResolver());
scopedFilters.add(new ScopedFilter(new Condition()
{
@Override
public Filter getFilter()
{
return metadataExistsFilter;
}
}, Scope.Query));

DatabaseQuery result = underTest.buildQuery(scopedFilters, START, LENGTH);

@@ -134,10 +141,17 @@ public class DSpaceDatabaseQueryResolverTest extends AbstractQueryResolverTest {
)
.withName("fields"));

scopedFilters.add(new ScopedFilter(new CustomCondition(getFilterResolver(),
DSpaceMetadataExistsFilter.class,
filterConfiguration),
Scope.Query));
final DSpaceMetadataExistsFilter metadataExistsFilter = new DSpaceMetadataExistsFilter();
metadataExistsFilter.setConfiguration(filterConfiguration);
metadataExistsFilter.setFieldResolver(theFieldResolver());
scopedFilters.add(new ScopedFilter(new Condition()
{
@Override
public Filter getFilter()
{
return metadataExistsFilter;
}
}, Scope.Query));

DatabaseQuery result = underTest.buildQuery(scopedFilters, START, LENGTH);

@@ -9,7 +9,7 @@
<parent>
    <groupId>org.dspace</groupId>
    <artifactId>dspace-parent</artifactId>
    <version>5.0</version>
    <version>5.5</version>
    <relativePath>..</relativePath>
</parent>

@@ -40,12 +40,17 @@
<url-pattern>/*</url-pattern>
</filter-mapping>

<!--
  DSpace Kernel startup listener. This listener is in charge of initializing/starting the
  DSpace Kernel. It MUST be listed BEFORE any other DSpace listeners, as DSpace services
  will not function until the Kernel is initialized.
-->
<listener>
    <listener-class>org.dspace.app.util.DSpaceContextListener</listener-class>
    <listener-class>org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener</listener-class>
</listener>

<listener>
    <listener-class>org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener</listener-class>
    <listener-class>org.dspace.app.util.DSpaceContextListener</listener-class>
</listener>

<servlet>

@@ -3,73 +3,190 @@
A RESTful web services API for DSpace, built using JAX-RS1 JERSEY.

##Getting Started
This REST API is integrated directly into the DSpace code-base.
This REST API is integrated directly into the DSpace codebase.

* Rebuild as normal: mvn + ant
* Deploy the webapp (i.e to tomcat)
* ```<Context path="/rest" docBase="/dspace/webapps/rest" allowLinking="true"/>```
* Rebuild as usual: mvn + ant
* Deploy the webapp (i.e., to Tomcat)
* ```<Context path="/rest" docBase="/dspace/webapps/rest" />```

At this point, this is a READ ONLY API for DSpace, for the anonymous user. Only Anonymous READ Communities, Collections, Items, and Bitstreams are available.
The REST API can perform all CRUD (create, read, update, delete) operations on communities, collections, items, bitstreams, and bitstream policies. Without logging into the REST API, you have read access as an anonymous user (a member of the Anonymous group). If you want to make changes in DSpace using the REST API, you must log in through the "login" endpoint and then send the returned token in the request header of your subsequent API calls.
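The login flow above is easiest to see in code. Below is a minimal, hypothetical Java sketch (not part of this codebase): the endpoint paths come from this README, the credentials are placeholders, and the token header name `rest-dspace-token` is an assumption drawn from the DSpace 5 REST documentation, so verify it against your deployment.

```java
// Hypothetical sketch of logging into the DSpace REST API and reusing the token.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class RestLoginExample {
    public static void main(String[] args) throws Exception {
        // POST placeholder credentials to the "login" endpoint as JSON.
        HttpURLConnection login = (HttpURLConnection)
                new URL("http://localhost:8080/rest/login").openConnection();
        login.setRequestMethod("POST");
        login.setRequestProperty("Content-Type", "application/json");
        login.setDoOutput(true);
        try (OutputStream out = login.getOutputStream()) {
            out.write("{\"email\":\"admin@example.com\",\"password\":\"secret\"}"
                    .getBytes(StandardCharsets.UTF_8));
        }

        // The response body is the session token for subsequent calls.
        String token;
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(login.getInputStream(), StandardCharsets.UTF_8))) {
            token = in.readLine();
        }

        // Send the token in a request header on later calls, e.g. GET /rest/status.
        // The header name "rest-dspace-token" is an assumption; check your version's docs.
        HttpURLConnection status = (HttpURLConnection)
                new URL("http://localhost:8080/rest/status").openConnection();
        status.setRequestProperty("rest-dspace-token", token);
        System.out.println("status: HTTP " + status.getResponseCode());
    }
}
```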
##Endpoints

| Resource      |CREATE|READ list|READ single|Edit|Delete|Search|
| ------------- |------|:-------:|-----------|----|------|------|
| /communities  |      | Y       | Y         |    |      |      |
| /collections  |      | Y       | Y         |    |      |      |
| /items        |      |         | Y         |    |      |      |
| /bitstreams   |      |         | Y         |    |      |      |
| /communities  | Y    | Y       | Y         | Y  | Y    |      |
| /collections  | Y    | Y       | Y         | Y  | Y    | Y    |
| /items        | Y    | Y       | Y         | Y  | Y    | Y    |
| /bitstreams   | Y    | Y       | Y         | Y  | Y    |      |

Search in collections is possible only by name and search in items only by metadata field.

###Index
Get information on how to use the API
- GET http://localhost:8080

Test whether the REST API is running and available
- GET http://localhost:8080/rest/test

Log into REST API
- POST http://localhost:8080/rest/login

Logout from REST API
- POST http://localhost:8080/rest/logout

Get status of REST API and the logged-in user
- GET http://localhost:8080/rest/status

###Communities
View the list of top-level communities
- http://localhost:8080/rest/communities
- GET http://localhost:8080/rest/communities/top-communities

View the list of all communities
- GET http://localhost:8080/rest/communities[?expand={collections,parentCommunity,subCommunities,logo,all}]

View a specific community
- http://localhost:8080/rest/communities/:ID
- GET http://localhost:8080/rest/communities/:ID[?expand={collections,parentCommunity,subCommunities,logo,all}]

View the list of subcollections in community
- GET http://localhost:8080/rest/communities/:ID/collections[?expand={items,parentCommunityList,license,logo,all}]

View the list of subcommunities in community
- GET http://localhost:8080/rest/communities/:ID/communities[?expand={collections,parentCommunity,subCommunities,logo,all}]

Create new top-level community
- POST http://localhost:8080/rest/communities

Create new subcollection in community
- POST http://localhost:8080/rest/communities/:ID/collections

Create new subcommunity in community
- POST http://localhost:8080/rest/communities/:ID/communities

Update community
- PUT http://localhost:8080/rest/communities/:ID

Delete community
- DELETE http://localhost:8080/rest/communities/:ID

Delete subcollection in community
- DELETE http://localhost:8080/rest/communities/:ID/collections/:ID

Delete subcommunity in community
- DELETE http://localhost:8080/rest/communities/:ID/communities/:ID

View a specific community, list its subcommunities, and subcollections
- http://localhost:8080/rest/communities/:ID?expand=all

###Collections
View the list of collections
- http://localhost:8080/rest/collections
- GET http://localhost:8080/rest/collections[?expand={items,parentCommunityList,license,logo,all}]

View a specific collection
- http://localhost:8080/rest/collections/:ID
- GET http://localhost:8080/rest/collections/:ID[?expand={items,parentCommunityList,license,logo,all}]

View items in collection
- GET http://localhost:8080/rest/collections/:ID/items[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}]

Create item in collection
- POST http://localhost:8080/rest/collections/:ID/items

Find collection by name
- POST http://localhost:8080/rest/collections/find-collection

Update collection
- PUT http://localhost:8080/rest/collections/:ID

Delete collection
- DELETE http://localhost:8080/rest/collections/:ID

Delete item in collection
- DELETE http://localhost:8080/rest/collections/:ID/items/:ID

View a specific collection, and its items
- http://localhost:8080/rest/collections/:ID?expand=all

###Items
View an Item, and see its bitstreams
- http://localhost:8080/rest/items/:ID
View the list of items
- GET http://localhost:8080/rest/items[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}]

View specific item
- GET http://localhost:8080/rest/items/:ID[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}]

View an Item and view its bitstreams
- GET http://localhost:8080/rest/items/:ID/bitstreams[?expand={parent,policies,all}]

View an Item, and view its metadata
- GET http://localhost:8080/rest/items/:ID/metadata

Find item by metadata (see the sketch after this section)
- POST http://localhost:8080/rest/items/find-by-metadata-field

Add metadata to item
- POST http://localhost:8080/rest/items/:ID/metadata

Create bitstream in item
- POST http://localhost:8080/rest/items/:ID/bitstreams

Update metadata in item
- PUT http://localhost:8080/rest/items/:ID/metadata

Delete item
- DELETE http://localhost:8080/rest/items/:ID

Delete all metadata in item
- DELETE http://localhost:8080/rest/items/:ID/metadata

Delete bitstream in item
- DELETE http://localhost:8080/rest/items/:ID/bitstreams/:ID

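The find-by-metadata-field endpoint above takes a request body naming the metadata field and value to search for. Here is a minimal, hypothetical sketch; the `{key, value, language}` JSON shape is an assumption based on the DSpace 5 REST documentation rather than something this README specifies, so verify it before relying on it.

```java
// Hypothetical sketch: search for items by a metadata field over the REST API.
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class FindByMetadataExample {
    public static void main(String[] args) throws Exception {
        // Assumed MetadataEntry shape: {key, value, language}; values are placeholders.
        String body = "{\"key\":\"dc.title\",\"value\":\"Test item\",\"language\":null}";
        HttpURLConnection conn = (HttpURLConnection)
                new URL("http://localhost:8080/rest/items/find-by-metadata-field").openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setRequestProperty("Accept", "application/json");
        conn.setDoOutput(true);
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes(StandardCharsets.UTF_8));
        }
        System.out.println("search: HTTP " + conn.getResponseCode());
    }
}
```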
###Bitstreams
View the list of bitstreams
- GET http://localhost:8080/rest/bitstreams[?expand={parent,policies,all}]

View information about a bitstream
- http://localhost:8080/rest/bitstreams/:ID
- GET http://localhost:8080/rest/bitstreams/:ID[?expand={parent,policies,all}]

View/Download a specific Bitstream
- http://localhost:8080/rest/bitstreams/:ID/retrieve
- GET http://localhost:8080/rest/bitstreams/:ID/retrieve

View the list of policies of bitstream
- GET http://localhost:8080/rest/bitstreams/:ID/policy

Add policy to bitstream
- POST http://localhost:8080/rest/bitstreams/:ID/policy

Update bitstream
- PUT http://localhost:8080/rest/bitstreams/:ID

Update data of bitstream
- PUT http://localhost:8080/rest/bitstreams/:ID/data

Delete bitstream
- DELETE http://localhost:8080/rest/bitstreams/:ID

Delete policy of bitstream
- DELETE http://localhost:8080/rest/bitstreams/:ID/policy/:ID

####Statistics
Recording of statistics for view of items or download of bitstreams (set stats = true in rest.cfg to enable stats recording)
Recording view events of items and download events of bitstreams (set stats = true in rest.cfg to enable recording of events)
http://localhost:8080/rest/items/:ID?userIP=ip&userAgent=userAgent&xforwardedfor=xforwardedfor
If no parameters are given the details of httprequest sender are used in statistics.
This enables tools to record the details of their user rather then themselves.
If no parameters are given, the details of the HTTP request sender are used in statistics.
This enables tools like proxies to supply the details of their user rather than themselves.
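As a hypothetical illustration of the parameters above, a proxy-style tool could record a view event on behalf of its end user like this (the item ID, IP address, and user agent are all placeholders):

```java
// Hypothetical sketch: report the end user's details instead of the proxy's own.
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class RecordViewExample {
    public static void main(String[] args) throws Exception {
        String itemId = "123"; // placeholder item ID
        String url = "http://localhost:8080/rest/items/" + itemId
                + "?userIP=" + URLEncoder.encode("203.0.113.7", "UTF-8")
                + "&userAgent=" + URLEncoder.encode("Mozilla/5.0", "UTF-8")
                + "&xforwardedfor=" + URLEncoder.encode("203.0.113.7", "UTF-8");
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        System.out.println("view recorded: HTTP " + conn.getResponseCode());
    }
}
```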
###Handles
Lookup a DSpaceObject by its Handle, this produces the name/ID, that you lookup in /bitstreams, /items, /collections, /communities
Lookup a DSpaceObject by its Handle; this produces the name/ID that you look up in /bitstreams, /items, /collections, /communities
- http://localhost:8080/rest/handle/{prefix}/{suffix}

##Expand
There is an ?expand= query parameter for more expensive operations. You can tack it on the end of endpoints.
There is an ?expand= query parameter for more expensive operations. You can add it at the end of the request URL.
It is optional; you can expand all, some, or none of the available options. The response will usually indicate what the available "expand" options are.
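For example, using the expand options listed for items above, GET http://localhost:8080/rest/items/:ID?expand=metadata,bitstreams returns the item together with its metadata and bitstreams in one call; multiple options can typically be combined with commas.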
##HTTP Responses
* 200 OK - We have the requested object/objects
* 200 OK - The requested object/objects exist
* 401 Unauthorized - The anonymous user does not have READ access to that object
* 404 Not Found - That object doesn't exist
* 500 Server Error - Likely a SQLException, IOException, more details in the logs.
* 404 Not Found - The specified object doesn't exist
* 405 Method Not Allowed - Wrong request method (GET, POST, PUT, DELETE) or wrong data format (JSON/XML).
* 415 Unsupported Media Type - Missing "Content-Type: application/json" or "Content-Type: application/xml" request header
* 500 Server Error - Likely a SQLException or IOException; more details in the logs.

@@ -3,7 +3,7 @@
<groupId>org.dspace</groupId>
<artifactId>dspace-rest</artifactId>
<packaging>war</packaging>
<version>5.0</version>
<version>5.5</version>
<name>DSpace REST :: API and Implementation</name>
<description>DSpace RESTful Web Services API</description>
<url>http://demo.dspace.org</url>

@@ -11,7 +11,7 @@
<parent>
    <groupId>org.dspace</groupId>
    <artifactId>dspace-parent</artifactId>
    <version>5.0</version>
    <version>5.5</version>
    <relativePath>..</relativePath>
</parent>

@@ -36,18 +36,18 @@
<dependency>
    <groupId>com.sun.jersey</groupId>
    <artifactId>jersey-server</artifactId>
    <version>1.17.1</version>
    <version>1.19</version>
</dependency>
<dependency>
    <groupId>com.sun.jersey</groupId>
    <artifactId>jersey-servlet</artifactId>
    <version>1.17.1</version>
    <version>1.19</version>
</dependency>
<!-- JSON serialization, should I use jackson?-->
<dependency>
    <groupId>com.sun.jersey</groupId>
    <artifactId>jersey-json</artifactId>
    <version>1.17.1</version>
    <version>1.19</version>
</dependency>

<!-- Spring 3 dependencies -->

@@ -70,7 +70,7 @@
<dependency>
    <groupId>com.sun.jersey.contribs</groupId>
    <artifactId>jersey-spring</artifactId>
    <version>1.8</version>
    <version>1.19</version>
    <exclusions>
        <exclusion>
            <groupId>org.springframework</groupId>

@@ -146,25 +146,16 @@ public class BitstreamResource extends Resource

log.info("Reading bitstream(id=" + bitstreamId + ") policies.");
org.dspace.core.Context context = null;
List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
ResourcePolicy[] policies = null;

try
{
    context = createContext(getUser(headers));
    org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.READ);
    AuthorizeManager.getPolicies(context, dspaceBitstream);

    policies = new Bitstream(dspaceBitstream,"policies").getPolicies();

    Bundle[] bundles = dspaceBitstream.getBundles();
    for (Bundle bundle : bundles)
    {
        List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies();
        for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies)
        {
            if (policy.getResourceID() == bitstreamId)
            {
                policies.add(new ResourcePolicy(policy));
            }
        }
    }
    context.complete();
    log.trace("Policies for bitstream(id=" + bitstreamId + ") was successfully read.");

@@ -184,7 +175,7 @@ public class BitstreamResource extends Resource
    processFinally(context);
}

return policies.toArray(new ResourcePolicy[0]);
return policies;
}

/**

@@ -359,42 +350,26 @@ public class BitstreamResource extends Resource
 */
@POST
@Path("/{bitstream_id}/policy")
@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public javax.ws.rs.core.Response addBitstreamPolicy(@PathParam("bitstream_id") Integer bitstreamId, ResourcePolicy policy,
        @Context HttpHeaders headers)
        @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
        @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, @Context HttpServletRequest request)
        throws WebApplicationException
{

    log.info("Adding bitstream(id=" + bitstreamId + ") READ policy with permission for group(id=" + policy.getGroupId()
    log.info("Adding bitstream(id=" + bitstreamId + ") " + policy.getAction() + " policy with permission for group(id=" + policy.getGroupId()
            + ").");
    org.dspace.core.Context context = null;

    try
    {
        context = createContext(getUser(headers));
        org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.READ);
        org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.WRITE);

        Bundle[] bundles = dspaceBitstream.getBundles();
        writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers,
                request, context);

        for (Bundle bundle : bundles)
        {
            List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies();

            org.dspace.authorize.ResourcePolicy dspacePolicy = org.dspace.authorize.ResourcePolicy.create(context);
            dspacePolicy.setAction(policy.getActionInt());
            dspacePolicy.setGroup(Group.find(context, policy.getGroupId()));
            dspacePolicy.setResourceID(dspaceBitstream.getID());
            dspacePolicy.setResource(dspaceBitstream);
            dspacePolicy.setResourceType(org.dspace.core.Constants.BITSTREAM);
            dspacePolicy.setStartDate(policy.getStartDate());
            dspacePolicy.setEndDate(policy.getEndDate());
            dspacePolicy.setRpDescription(policy.getRpDescription());
            dspacePolicy.setRpName(policy.getRpName());
            dspacePolicy.update();
            // dspacePolicy.setRpType(org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM);
            bitstreamsPolicies.add(dspacePolicy);

            bundle.replaceAllBitstreamPolicies(bitstreamsPolicies);
            bundle.update();
        }
        addPolicyToBitstream(context, policy, dspaceBitstream);

        context.complete();
        log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully added.");

@@ -485,43 +460,14 @@ public class BitstreamResource extends Resource

if (bitstream.getPolicies() != null)
{
    Bundle[] bundles = dspaceBitstream.getBundles();
    ResourcePolicy[] policies = bitstream.getPolicies();
    for (Bundle bundle : bundles)
    {
        List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies();
        // Remove old bitstream policies
        List<org.dspace.authorize.ResourcePolicy> policiesToRemove = new ArrayList<org.dspace.authorize.ResourcePolicy>();
        for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies)
        {
            if (policy.getResourceID() == dspaceBitstream.getID())
            {
                policiesToRemove.add(policy);
            }
        }
        for (org.dspace.authorize.ResourcePolicy policy : policiesToRemove)
        {
            bitstreamsPolicies.remove(policy);
        }
    log.trace("Updating bitstream policies.");

        // Add all new bitstream policies
        for (ResourcePolicy policy : policies)
        {
            org.dspace.authorize.ResourcePolicy dspacePolicy = org.dspace.authorize.ResourcePolicy.create(context);
            dspacePolicy.setAction(policy.getActionInt());
            dspacePolicy.setGroup(Group.find(context, policy.getGroupId()));
            dspacePolicy.setResourceID(dspaceBitstream.getID());
            dspacePolicy.setResource(dspaceBitstream);
            dspacePolicy.setResourceType(org.dspace.core.Constants.BITSTREAM);
            dspacePolicy.setStartDate(policy.getStartDate());
            dspacePolicy.setEndDate(policy.getEndDate());
            dspacePolicy.setRpDescription(policy.getRpDescription());
            dspacePolicy.setRpName(policy.getRpName());
            dspacePolicy.update();
            bitstreamsPolicies.add(dspacePolicy);
        }
        bundle.replaceAllBitstreamPolicies(bitstreamsPolicies);
        bundle.update();
    // Remove all old bitstream policies.
    AuthorizeManager.removeAllPolicies(context,dspaceBitstream);

    // Add all new bitstream policies
    for (ResourcePolicy policy : bitstream.getPolicies()) {
        addPolicyToBitstream(context, policy, dspaceBitstream);
    }
}

@@ -730,55 +676,52 @@ public class BitstreamResource extends Resource
@DELETE
@Path("/{bitstream_id}/policy/{policy_id}")
public javax.ws.rs.core.Response deleteBitstreamPolicy(@PathParam("bitstream_id") Integer bitstreamId,
        @PathParam("policy_id") Integer policyId, @Context HttpHeaders headers)
        @PathParam("policy_id") Integer policyId, @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
        @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, @Context HttpServletRequest request)
        throws WebApplicationException
{

    log.info("Deleting bitstream(id=" + bitstreamId + ") READ policy(id=" + policyId + ").");
    log.info("Deleting policy(id=" + policyId + ") from bitstream(id=" + bitstreamId + ").");
    org.dspace.core.Context context = null;

    try
    {
        context = createContext(getUser(headers));
        org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.READ);
        org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.WRITE);

        Bundle[] bundles = dspaceBitstream.getBundles();
        writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers,
                request, context);

        for (Bundle bundle : bundles)
        {
            List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies();

            for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies)
            {
                if (policy.getID() == policyId.intValue())
                {
                    bitstreamsPolicies.remove(policy);
                    break;
                }
        // Check if resource policy exists in bitstream.
        boolean found = false;
        List<org.dspace.authorize.ResourcePolicy> policies = AuthorizeManager.getPolicies(context, dspaceBitstream);
        for(org.dspace.authorize.ResourcePolicy policy : policies) {
            if(policy.getID() == policyId) {
                found = true;
                break;
            }
        }

        bundle.replaceAllBitstreamPolicies(bitstreamsPolicies);
        bundle.update();
        if(found) {
            removePolicyFromBitstream(context, policyId, bitstreamId);
        } else {
            context.abort();
            throw new WebApplicationException(Response.Status.NOT_FOUND);
        }

        context.complete();
        log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully added.");
        log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully removed.");

    }
    catch (SQLException e)
    {
        processException("Someting went wrong while deleting READ policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
        processException("Something went wrong while deleting policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
                + "), SQLException! Message: " + e, context);
    }
    catch (ContextException e)
    {
        processException("Someting went wrong while deleting READ policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
        processException("Something went wrong while deleting policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
                + "), ContextException. Message: " + e.getMessage(), context);
    }
    catch (AuthorizeException e)
    {
        processException("Someting went wrong while deleting READ policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
                + "), AuthorizeException! Message: " + e, context);
    }
    finally
    {
        processFinally(context);

@@ -799,6 +742,41 @@ public class BitstreamResource extends Resource
    return URLConnection.guessContentTypeFromName(name);
}

/**
 * Add policy(org.dspace.rest.common.ResourcePolicy) to bitstream.
 * @param context Context to create DSpace ResourcePolicy.
 * @param policy Policy which will be added to bitstream.
 * @param dspaceBitstream Bitstream to which the policy will be added.
 * @throws SQLException
 * @throws AuthorizeException
 */
private void addPolicyToBitstream(org.dspace.core.Context context, ResourcePolicy policy, org.dspace.content.Bitstream dspaceBitstream) throws SQLException, AuthorizeException {
    org.dspace.authorize.ResourcePolicy dspacePolicy = org.dspace.authorize.ResourcePolicy.create(context);
    dspacePolicy.setAction(policy.getActionInt());
    dspacePolicy.setGroup(Group.find(context, policy.getGroupId()));
    dspacePolicy.setResourceID(dspaceBitstream.getID());
    dspacePolicy.setResource(dspaceBitstream);
    dspacePolicy.setResourceType(org.dspace.core.Constants.BITSTREAM);
    dspacePolicy.setStartDate(policy.getStartDate());
    dspacePolicy.setEndDate(policy.getEndDate());
    dspacePolicy.setRpDescription(policy.getRpDescription());
    dspacePolicy.setRpName(policy.getRpName());

    dspacePolicy.update();
    dspaceBitstream.updateLastModified();
}

/**
 * Remove policy from bitstream, but only if the resourceID of the policy is the same as the bitstream id.
 * @param context Context to delete policy.
 * @param policyID Id of resource policy, which will be deleted.
 * @param bitstreamID Id of bitstream.
 * @throws SQLException
 */
private void removePolicyFromBitstream(org.dspace.core.Context context, int policyID, int bitstreamID) throws SQLException {
    DatabaseManager.updateQuery(context, "DELETE FROM resourcepolicy WHERE POLICY_ID = ? AND RESOURCE_ID = ?", policyID, bitstreamID);
}

/**
 * Find bitstream from DSpace database. This encapsulates the
 * org.dspace.content.Bitstream.find method with a check whether the item exists and

@@ -270,6 +270,9 @@ public class CollectionsResource extends Resource
        writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor,
                headers, request, context);
    }
} else {
    //Advance the iterator to offset.
    dspaceItems.nextID();
}
}

@@ -355,7 +358,7 @@ public class CollectionsResource extends Resource
workspaceItem.update();

// Index item to browse.
org.dspace.browse.IndexBrowse browse = new org.dspace.browse.IndexBrowse();
org.dspace.browse.IndexBrowse browse = new org.dspace.browse.IndexBrowse(context);
browse.indexItem(dspaceItem);

log.trace("Installing item to collection(id=" + collectionId + ").");

@@ -533,8 +536,7 @@ public class CollectionsResource extends Resource
{
    processException("Could not delete collection(id=" + collectionId + "), IOException. Message: " + e, context);
}
finally
{
finally {
    processFinally(context);
}

@@ -639,8 +641,7 @@ public class CollectionsResource extends Resource
    processException("Could not delete item(id=" + itemId + ") in collection(id=" + collectionId
            + "), IOException. Message: " + e, context);
}
finally
{
finally {
    processFinally(context);
}

@@ -10,14 +10,16 @@ package org.dspace.rest;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.rest.common.Collection;
import org.dspace.rest.common.Community;
import org.dspace.rest.common.DSpaceObject;
import org.dspace.rest.common.Item;
import org.dspace.rest.exceptions.ContextException;

import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.sql.SQLException;

@@ -30,20 +32,20 @@ import java.sql.SQLException;
 * To change this template use File | Settings | File Templates.
 */
@Path("/handle")
public class HandleResource {
public class HandleResource extends Resource {
    private static Logger log = Logger.getLogger(HandleResource.class);
    private static org.dspace.core.Context context;

    @GET
    @Path("/{prefix}/{suffix}")
    @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
    public org.dspace.rest.common.DSpaceObject getObject(@PathParam("prefix") String prefix, @PathParam("suffix") String suffix, @QueryParam("expand") String expand) {
    public org.dspace.rest.common.DSpaceObject getObject(@PathParam("prefix") String prefix,
            @PathParam("suffix") String suffix, @QueryParam("expand") String expand,
            @Context HttpHeaders headers) throws WebApplicationException{
        org.dspace.core.Context context = null;
        DSpaceObject result = null;

        try {
            if(context == null || !context.isValid() ) {
                context = new Context();
                //Failed SQL is ignored as a failed SQL statement, prevent: current transaction is aborted, commands ignored until end of transaction block
                context.getDBConnection().setAutoCommit(true);
            }
            context = createContext(getUser(headers));

            org.dspace.content.DSpaceObject dso = HandleManager.resolveToObject(context, prefix + "/" + suffix);
            if(dso == null) {

@@ -54,20 +56,31 @@ public class HandleResource {
            if(AuthorizeManager.authorizeActionBoolean(context, dso, org.dspace.core.Constants.READ)) {
                switch(dso.getType()) {
                    case Constants.COMMUNITY:
                        return new Community((org.dspace.content.Community) dso, expand, context);
                        result = new Community((org.dspace.content.Community) dso, expand, context);
                        break;
                    case Constants.COLLECTION:
                        return new Collection((org.dspace.content.Collection) dso, expand, context, null, null);
                        result = new Collection((org.dspace.content.Collection) dso, expand, context, null, null);
                        break;
                    case Constants.ITEM:
                        return new Item((org.dspace.content.Item) dso, expand, context);
                        result = new Item((org.dspace.content.Item) dso, expand, context);
                        break;
                    default:
                        return new DSpaceObject(dso);
                        result = new DSpaceObject(dso);
                }
            } else {
                throw new WebApplicationException(Response.Status.UNAUTHORIZED);
            }

            context.complete();

        } catch (SQLException e) {
            log.error(e.getMessage());
            throw new WebApplicationException(Response.Status.INTERNAL_SERVER_ERROR);
            processException("Could not read handle(" + prefix + "/" + suffix + "), SQLException. Message: " + e.getMessage(), context);
        } catch (ContextException e) {
            processException("Could not read handle(" + prefix + "/" + suffix + "), ContextException. Message: " + e.getMessage(), context);
        } finally{
            processFinally(context);
        }

        return result;
    }
}
Some files were not shown because too many files have changed in this diff.