Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 01:54:22 +00:00)

Compare commits: 258 commits, 6ac823d29a ... dspace-5.6
SHA1:
03724151be
52db795b72
5f3f552078
39f4db91da
04ba49ba56
1aa92f8d00
85f2195396
c5cdedb0c6
b805aaf1dd
da315a4911
ea4e3ee857
1c4089c6b2
9e0208fa96
76d6dec743
427ba190a6
bdd4eb20dc
c7cbd44330
50a4f046d4
b5330b7815
4fed285c83
9390016397
b3c7f0a7f1
8da8431869
2549e643f9
ac0721767b
679c971ec3
b50d35d3f3
d6412e9af3
067c1b1a95
20026af124
b3f9ea0eaa
987a16d23f
43d44aa0cc
307d577b35
04c60ba939
462360ed4d
c6fda557f7
e73f83f7a4
9f0f5940e7
88ed833e2c
91d4081b03
d9e986d669
132f37a10a
98a26fa3e7
4f5f5acdbe
212011cc75
e7b49d8310
a70f0bdd22
a84763a258
5a1028a7a9
16b123e9df
f057ed8c07
875bb59eb0
2c09aea8fd
533245c8dd
875bba3add
55e623d1c2
81a6d173ca
3ff604742b
3bfe7b8ea8
ee62f9d6f0
be35b0450b
8c94edc29c
2bf0275678
86ca33eaa3
f64d4b3367
c908997900
e2dd1089c9
8809150e66
1fd2723848
454f40b3f4
f05c9e794f
56fc41cac3
0175e5edff
d17886c1cd
06668c363e
4b3a07120c
50c4a54bd6
0aabf5d780
04ce6ff2f4
1f8f6241c2
4a2f392ed8
fac705ec3f
e1263249f5
553b1a72c5
6242865207
59fa31641a
58344b610f
563d90f7c4
131555604a
fbde108024
2c59a9dd35
d307c56d07
1d2b954889
69cfc61167
b944ceb112
9885ed851a
52ce1eb52b
deeef45943
ad21875ac8
4ee79a3d89
c01c3af153
f493a475fd
a3a5f562c9
3479b0a254
39289b6762
edf7ea6524
2045fee8ab
bac9beaffa
569ad5f546
b465f26646
ad19c3aeb6
34c20d49ad
eaa08adb62
15f3c247bc
2a44765f39
87c34f1f1c
fce84880bc
3f94c3acb4
50cb865ea2
a9b8d8bfbc
600f680cd6
01d7d060d7
4a6663c2f4
b3c87b2be7
ac08b6a4e3
a2f5fe34eb
ace19199e5
6d9fa26535
3efe549774
734744ec4f
829c30bab4
83cb04ed53
0911d60290
9bb7036857
e0368f3ade
660217c3f9
5f13b8cc64
a2caabc79a
cb9710cda4
56abebaece
0310db74aa
3e1bac69df
ec86af5a82
79e111996b
f4c6f2680c
f3487be040
87d0770974
1c9fa656aa
59ff964f4f
10c4661885
afe9c1294f
7a54972ed1
b2cb0ef4dd
5edf641d6c
d9b14a86f0
7b8fa49632
b5540d5999
494ff0c4c1
1c4c8943a9
5cd56fb834
ed89d6b00e
19b28f4734
4a8fdf6843
d040b9dd4e
4036bf781a
d011e24f74
0e9f78e9df
254097b2e2
8049cef23b
de842dbf30
8bcac58154
511b78277f
dbd019943a
7d8a9d5636
2ab6b10a03
cd7789e8df
9287aa891f
a99203382c
6ec649df78
e9f4e4c2cc
18cc6bb3ff
8094d8fe18
b7a469d53c
f168c6c33d
981b62d9e9
2c42d71a6a
ca6bc57c6d
0f0be17d0a
5e5a7922d0
bb4cb39373
a257f516fa
9d8284d85f
57efa4f628
5b5f44085a
46ce2741bc
0b799fc882
04b57a60b3
02b4314046
3d79fa76ab
ca1803ae93
9046ec21d4
b30654e3d5
ee19e11e6d
a990c97959
56816b13ba
b414aaa195
1a1ae35ec9
1029f393e4
c1039dfe26
cc96646e37
d2ad7c81de
00e9c1131f
77cc9abe49
91018bfe0f
7f9bcb283f
ae11c1c795
9cd5fa596b
e10b10224a
e08886ae09
df3ffcf7f9
0c77f7be91
cdc8e3144e
92847079d7
b023c36941
aee3b0b710
d0c8afb601
e9c14bbcea
2eca19daa3
bcc7a75baa
19222e9341
8124a61738
09007146d0
e715c64404
53ff4510ac
495031001d
97e89384f1
72913cda76
03097aaa35
f6d3f67b52
62e0ac462e
54310b014b
beaf54f624
114f1e0985
1fdfe05c4c
9c1f91d40b
39711b332f
6cfda147b4
eabdc610a0
da74f5aa7e
14c575a7c4
d8c8d28c13
bf56f1f7e3
8046d154ee
589117e204
e9e5423f97
c08f447cec
cf25175155
@@ -1,4 +1,5 @@
language: java
sudo: false

env:
# Give Maven 1GB of memory to work with

LICENSE
@@ -1,7 +1,7 @@
DSpace source code license:


Copyright (c) 2002-2013, DuraSpace. All rights reserved.
Copyright (c) 2002-2015, DuraSpace. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
@@ -266,22 +266,24 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace JSP-UI (org.dspace:dspace-jspui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.0-rc4-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.4 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.3-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.5 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
* handle (org.dspace:handle:6.2 - no url defined)
* jargon (org.dspace:jargon:1.4.25 - no url defined)
* mets (org.dspace:mets:1.5.2 - no url defined)
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.3-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
@@ -386,8 +388,3 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined)
* Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
* XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)

Unknown license:

* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
@@ -66,14 +66,12 @@ db.password=dspace
#db.username=dspace
#db.password=dspace

# Schema name - if your database contains multiple schemas, you can avoid problems with
# retrieving the definitions of duplicate object names by specifying
# the schema name here that is used for DSpace by uncommenting the following entry

# NOTE: this configuration option is for PostgreSQL only. For Oracle, schema is equivalent
# to user name. DSpace depends on the PostgreSQL understanding of schema. If you are using
# Oracle, just leave this this value blank.

# Schema name - if your database contains multiple schemas, you can avoid
# problems with retrieving the definitions of duplicate object names by
# specifying the schema name that is used for DSpace.
# ORACLE USAGE NOTE: In Oracle, schema is equivalent to "username". This means
# specifying a "db.schema" is often unnecessary (i.e. you can leave it blank),
# UNLESS your Oracle DB Account (in db.username) has access to multiple schemas.
db.schema =

# Maximum number of DB connections in pool
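The rewritten comment block explains when db.schema matters: PostgreSQL installations with multiple schemas need it, while on Oracle the schema normally equals db.username. As a rough illustration of how a caller might honour that setting, here is a minimal sketch; it assumes the property is read through ConfigurationManager (as the other hunks in this compare do), and the helper name is invented for the example.

import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager;

public class SchemaConfigExample
{
    // Illustrative only: resolve the optional db.schema setting.
    public static String resolveSchema()
    {
        String schema = ConfigurationManager.getProperty("db.schema");
        // Blank is a legitimate value: on Oracle the schema usually equals
        // db.username, so the property is only needed when that account can
        // see multiple schemas.
        return StringUtils.isBlank(schema) ? null : schema.trim();
    }
}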
@@ -12,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>5.0</version>
<version>5.6</version>
<relativePath>..</relativePath>
</parent>

@@ -603,13 +603,21 @@
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-analytics</artifactId>
</dependency>
<dependency>
<groupId>com.google.api-client</groupId>
<artifactId>google-api-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client-jackson2</artifactId>
</dependency>
<dependency>
<groupId>com.google.oauth-client</groupId>
<artifactId>google-oauth-client-jetty</artifactId>
<artifactId>google-oauth-client</artifactId>
</dependency>
<!-- FindBugs -->
<dependency>

@@ -113,8 +113,8 @@ public class CommunityFiliator
CommunityFiliator filiator = new CommunityFiliator();
Context c = new Context();

// ve are superuser!
c.setIgnoreAuthorization(true);
// we are superuser!
c.turnOffAuthorisationSystem();

try
{

@@ -228,7 +228,7 @@ public final class CreateAdministrator
{
// Of course we aren't an administrator yet so we need to
// circumvent authorisation
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

// Find administrator group
Group admins = Group.find(context, 1);

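Both hunks above make the same substitution: the removed setIgnoreAuthorization(true) call is replaced by the paired turnOffAuthorisationSystem() / restoreAuthSystemState() methods on Context. A minimal sketch of that pattern, assuming only the Context API these hunks show (the doAdminWork method name is illustrative, not DSpace API):

import org.dspace.core.Context;

public class AuthorisationToggleExample
{
    // Illustrative helper: run privileged work with authorisation disabled,
    // restoring the previous state even if the work throws.
    public static void doAdminWork(Context context) throws Exception
    {
        context.turnOffAuthorisationSystem();
        try
        {
            // ... privileged operations that would otherwise fail an authorization check ...
        }
        finally
        {
            // restore rather than blindly re-enable, so callers keep whatever
            // authorisation state they had before this method ran
            context.restoreAuthSystemState();
        }
    }
}

restoreAuthSystemState() is intended to put the context back to the caller's prior state, which is why later hunks in this compare call it from finally blocks instead of calling setIgnoreAuthorization(false).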
@@ -88,7 +88,7 @@ public class MetadataExporter
{
// create a context
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
xmlFormat.setLineWidth(120);

@@ -1169,10 +1169,8 @@ public class MetadataImport
*/
private static boolean isAuthorityControlledField(String md)
{
int pos = md.indexOf("[");
String mdf = (pos > -1 ? md.substring(0, pos) : md);
pos = md.indexOf(":");
mdf = (pos > -1 ? md.substring(pos+1) : md);
String mdf = StringUtils.substringAfter(md, ":");
mdf = StringUtils.substringBefore(mdf, "[");
return authorityControlled.contains(mdf);
}

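The rewritten isAuthorityControlledField() replaces manual indexOf/substring bookkeeping with Commons Lang helpers. A small sketch of how those helpers behave on made-up field strings (the inputs are illustrative, not the exact CSV column format):

import org.apache.commons.lang.StringUtils;

public class SubstringHelpersExample
{
    public static void main(String[] args)
    {
        // substringAfter returns everything after the first separator,
        // or "" when the separator is absent.
        System.out.println(StringUtils.substringAfter("prefix:dc.subject[en]", ":")); // dc.subject[en]
        System.out.println(StringUtils.substringAfter("dc.subject", ":"));            // (empty string)

        // substringBefore returns everything before the first separator,
        // or the whole string when the separator is absent.
        System.out.println(StringUtils.substringBefore("dc.subject[en]", "["));       // dc.subject
        System.out.println(StringUtils.substringBefore("dc.subject", "["));           // dc.subject
    }
}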
@@ -209,7 +209,7 @@ public class ItemExport
}

Context c = new Context();
c.setIgnoreAuthorization(true);
c.turnOffAuthorisationSystem();

if (myType == Constants.ITEM)
{

@@ -2148,7 +2148,7 @@ public class ItemImport
context = new Context();
eperson = EPerson.find(context, oldEPerson.getID());
context.setCurrentUser(eperson);
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

boolean isResume = theResumeDir!=null;

@@ -351,7 +351,7 @@ public class ItemUpdate {

context = new Context();
iu.setEPerson(context, iu.eperson);
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
@@ -362,19 +362,20 @@ public class ItemUpdate {
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);

context.complete(); // complete all transactions
context.setIgnoreAuthorization(false);
}
catch (Exception e)
{
if (context != null && context.isValid())
{
context.abort();
context.setIgnoreAuthorization(false);
}
e.printStackTrace();
pr(e.toString());
status = 1;
}
finally {
context.restoreAuthSystemState();
}

if (isTest)
{

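The ItemUpdate hunk moves the authorisation restore into a finally block so it runs on both the commit path and the abort path. A compact sketch of that commit/abort shape, assuming only the Context methods that appear in this compare (the batch processing itself is elided):

import org.dspace.core.Context;

public class BatchContextExample
{
    public static int runBatch()
    {
        int status = 0;
        Context context = null;
        try
        {
            context = new Context();
            context.turnOffAuthorisationSystem();
            // ... process the archive ...
            context.complete();          // commit all changes
        }
        catch (Exception e)
        {
            if (context != null && context.isValid())
            {
                context.abort();         // roll back on any failure
            }
            status = 1;
        }
        finally
        {
            if (context != null)
            {
                context.restoreAuthSystemState();  // always rebalance the toggle
            }
        }
        return status;
    }
}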
@@ -11,6 +11,7 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.TreeMap;
import org.dspace.core.ConfigurationManager;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
@@ -275,9 +276,21 @@ public class ScriptLauncher
*/
private static void display()
{
// List all command elements
List<Element> commands = commandConfigs.getRootElement().getChildren("command");
System.out.println("Usage: dspace [command-name] {parameters}");

// Sort the commands by name.
// We cannot just use commands.sort() because it tries to remove and
// reinsert Elements within other Elements, and that doesn't work.
TreeMap<String, Element> sortedCommands = new TreeMap<>();
for (Element command : commands)
{
sortedCommands.put(command.getChild("name").getValue(), command);
}

// Display the sorted list
System.out.println("Usage: dspace [command-name] {parameters}");
for (Element command : sortedCommands.values())
{
System.out.println(" - " + command.getChild("name").getValue() +
": " + command.getChild("description").getValue());

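The new display() collects the commands into a TreeMap keyed by command name, so the help listing prints alphabetically without reordering the live JDOM elements. The same idea with plain strings (the command names and descriptions below are sample data, not read from the launcher configuration):

import java.util.Map;
import java.util.TreeMap;

public class SortedCommandListExample
{
    public static void main(String[] args)
    {
        // A TreeMap keeps entries sorted by key, so inserting in any order
        // still iterates alphabetically.
        Map<String, String> commands = new TreeMap<>();
        commands.put("index-discovery", "update the search index");
        commands.put("cleanup", "remove deleted bitstreams from the assetstore");
        commands.put("filter-media", "create derivative bitstreams such as thumbnails");

        System.out.println("Usage: dspace [command-name] {parameters}");
        for (Map.Entry<String, String> e : commands.entrySet())
        {
            System.out.println(" - " + e.getKey() + ": " + e.getValue());
        }
    }
}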
@@ -7,9 +7,10 @@
*/
package org.dspace.app.mediafilter;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;


/**
@@ -30,9 +31,24 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
throws Exception
{
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
File f2 = getThumbnailFile(f);
return new FileInputStream(f2);
}
File f2 = null;
try
{
f2 = getThumbnailFile(f);
byte[] bytes = Files.readAllBytes(f2.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
}
}


}

@@ -7,18 +7,40 @@
*/
package org.dspace.app.mediafilter;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;

public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
public InputStream getDestinationStream(InputStream source)
throws Exception
{
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
File f2 = getImageFile(f, 0);
File f3 = getThumbnailFile(f2);
return new FileInputStream(f3);
File f2 = null;
File f3 = null;
try
{
f2 = getImageFile(f, 0);
f3 = getThumbnailFile(f2);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
if (f3 != null)
{
//noinspection ResultOfMethodCallIgnored
f3.delete();
}
}
}

public static final String[] PDF = {"Adobe PDF"};

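Both thumbnail filters now buffer the generated image fully into memory and delete the temporary files in a finally block, instead of returning a FileInputStream that leaves the temp files behind. The pattern in isolation (the helper below is illustrative, not DSpace API):

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;

public class TempFileBufferingExample
{
    // Read a temporary file into memory, then remove it so no temp files leak,
    // even when reading fails.
    public static InputStream toBufferedStream(File tempFile) throws IOException
    {
        try
        {
            byte[] bytes = Files.readAllBytes(tempFile.toPath());
            return new ByteArrayInputStream(bytes);
        }
        finally
        {
            //noinspection ResultOfMethodCallIgnored
            tempFile.delete();
        }
    }
}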
@@ -38,6 +38,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
{
private static int width = 180;
private static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
@@ -48,6 +49,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
if (description != null) {
bitstreamDescription = description;
@@ -132,6 +134,10 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath()+s);
if (flatten)
{
op.flatten();
}
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
System.out.println("IM Image Param: "+op);

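The new flatten option is passed to ImageMagick through im4java's IMOperation. A rough sketch of an equivalent standalone conversion; it assumes im4java's ConvertCmd front-end and thumbnail() operation, which are not shown in this hunk, and the 180x120 size simply mirrors the defaults above:

import org.im4java.core.ConvertCmd;
import org.im4java.core.IMOperation;

public class FlattenThumbnailExample
{
    // Builds roughly: convert input.pdf[0] -flatten -thumbnail 180x120 output.jpg
    public static void createThumbnail(String inputPath, String outputPath)
            throws Exception
    {
        IMOperation op = new IMOperation();
        op.addImage(inputPath + "[0]");   // first page only
        op.flatten();                     // composite layers/transparency onto a flat background
        op.thumbnail(180, 120);
        op.addImage(outputPath);

        new ConvertCmd().run(op);
    }
}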
@@ -33,7 +33,7 @@ import java.util.zip.GZIPOutputStream;
|
||||
* }
|
||||
* g.finish();
|
||||
* </pre>
|
||||
*
|
||||
*
|
||||
* @author Robert Tansley
|
||||
*/
|
||||
public abstract class AbstractGenerator
|
||||
@@ -59,7 +59,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Initialize this generator to write to the given directory. This must be
|
||||
* called by any subclass constructor.
|
||||
*
|
||||
*
|
||||
* @param outputDirIn
|
||||
* directory to write sitemap files to
|
||||
*/
|
||||
@@ -73,7 +73,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Start writing a new sitemap file.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs creating the file
|
||||
*/
|
||||
@@ -97,7 +97,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Add the given URL to the sitemap.
|
||||
*
|
||||
*
|
||||
* @param url
|
||||
* Full URL to add
|
||||
* @param lastMod
|
||||
@@ -129,7 +129,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Finish with the current sitemap file.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs writing
|
||||
*/
|
||||
@@ -144,15 +144,18 @@ public abstract class AbstractGenerator
|
||||
* Complete writing sitemap files and write the index files. This is invoked
|
||||
* when all calls to {@link AbstractGenerator#addURL(String, Date)} have
|
||||
* been completed, and invalidates the generator.
|
||||
*
|
||||
*
|
||||
* @return number of sitemap files written.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs writing
|
||||
*/
|
||||
public int finish() throws IOException
|
||||
{
|
||||
closeCurrentFile();
|
||||
if (null != currentOutput)
|
||||
{
|
||||
closeCurrentFile();
|
||||
}
|
||||
|
||||
OutputStream fo = new FileOutputStream(new File(outputDir,
|
||||
getIndexFilename()));
|
||||
@@ -165,13 +168,13 @@ public abstract class AbstractGenerator
|
||||
PrintStream out = new PrintStream(fo);
|
||||
writeIndex(out, fileCount);
|
||||
out.close();
|
||||
|
||||
|
||||
return fileCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return marked-up text to be included in a sitemap about a given URL.
|
||||
*
|
||||
*
|
||||
* @param url
|
||||
* URL to add information about
|
||||
* @param lastMod
|
||||
@@ -183,14 +186,14 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Return the boilerplate at the top of a sitemap file.
|
||||
*
|
||||
*
|
||||
* @return The boilerplate markup.
|
||||
*/
|
||||
public abstract String getLeadingBoilerPlate();
|
||||
|
||||
/**
|
||||
* Return the boilerplate at the end of a sitemap file.
|
||||
*
|
||||
*
|
||||
* @return The boilerplate markup.
|
||||
*/
|
||||
public abstract String getTrailingBoilerPlate();
|
||||
@@ -198,7 +201,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return the maximum size in bytes that an individual sitemap file should
|
||||
* be.
|
||||
*
|
||||
*
|
||||
* @return the size in bytes.
|
||||
*/
|
||||
public abstract int getMaxSize();
|
||||
@@ -206,7 +209,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return the maximum number of URLs that an individual sitemap file should
|
||||
* contain.
|
||||
*
|
||||
*
|
||||
* @return the maximum number of URLs.
|
||||
*/
|
||||
public abstract int getMaxURLs();
|
||||
@@ -214,7 +217,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return whether the written sitemap files and index should be
|
||||
* GZIP-compressed.
|
||||
*
|
||||
*
|
||||
* @return {@code true} if GZIP compression should be used, {@code false}
|
||||
* otherwise.
|
||||
*/
|
||||
@@ -222,7 +225,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Return the filename a sitemap at the given index should be stored at.
|
||||
*
|
||||
*
|
||||
* @param number
|
||||
* index of the sitemap file (zero is first).
|
||||
* @return the filename to write the sitemap to.
|
||||
@@ -231,14 +234,14 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Get the filename the index should be written to.
|
||||
*
|
||||
*
|
||||
* @return the filename of the index.
|
||||
*/
|
||||
public abstract String getIndexFilename();
|
||||
|
||||
/**
|
||||
* Write the index file.
|
||||
*
|
||||
*
|
||||
* @param output
|
||||
* stream to write the index to
|
||||
* @param sitemapCount
|
||||
|
@@ -92,7 +92,7 @@ public class CreateStatReport {

// create context as super user
context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

//get paths to directories
outputLogDirectory = ConfigurationManager.getProperty("log.dir") + File.separator;

@@ -215,7 +215,7 @@ public class LogAnalyser

// create context as super user
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

// set up our command line variables
String myLogDir = null;

@@ -151,7 +151,7 @@ public class ReportGenerator
{
// create context as super user
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

String myFormat = null;
String myInput = null;

@@ -11,6 +11,7 @@ import java.sql.SQLException;
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
|
||||
import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.content.*;
|
||||
|
||||
@@ -22,20 +23,22 @@ import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.handle.HandleManager;
|
||||
|
||||
import org.jdom.Element;
|
||||
|
||||
/**
|
||||
@@ -757,16 +760,17 @@ public class GoogleMetadata
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch all metadata mappings
|
||||
*
|
||||
* Fetch retaining the order of the values for any given key in which they
|
||||
* were added (like authors).
|
||||
*
|
||||
* Usage: GoogleMetadata gmd = new GoogleMetadata(item); for(Entry<String,
|
||||
* String> mapping : googlemd.getMappings()) { ... }
|
||||
*
|
||||
* @return Iterable of metadata fields mapped to Google-formatted values
|
||||
*/
|
||||
public Set<Entry<String, String>> getMappings()
|
||||
public Collection<Entry<String, String>> getMappings()
|
||||
{
|
||||
return new HashSet<>(metadataMappings.entries());
|
||||
return metadataMappings.entries();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1041,7 +1045,6 @@ public class GoogleMetadata
|
||||
*/
|
||||
private Bitstream findLinkableFulltext(Item item) throws SQLException {
|
||||
Bitstream bestSoFar = null;
|
||||
int bitstreamCount = 0;
|
||||
Bundle[] contentBundles = item.getBundles("ORIGINAL");
|
||||
for (Bundle bundle : contentBundles) {
|
||||
int primaryBitstreamId = bundle.getPrimaryBitstreamID();
|
||||
@@ -1050,16 +1053,16 @@ public class GoogleMetadata
|
||||
if (candidate.getID() == primaryBitstreamId) { // is primary -> use this one
|
||||
if (isPublic(candidate)) {
|
||||
return candidate;
|
||||
}
|
||||
} else
|
||||
{
|
||||
|
||||
if (bestSoFar == null && isPublic(candidate)) { // take the candidate only if nothing has been chosen yet and it is publicly readable
|
||||
bestSoFar = candidate;
|
||||
}
|
||||
}
|
||||
} else if (bestSoFar == null) {
|
||||
bestSoFar = candidate;
|
||||
}
|
||||
bitstreamCount++;
|
||||
}
|
||||
}
|
||||
if (bitstreamCount > 1 || !isPublic(bestSoFar)) {
|
||||
bestSoFar = null;
|
||||
}
|
||||
|
||||
return bestSoFar;
|
||||
}
|
||||
|
@@ -279,11 +279,11 @@ public class LDAPAuthentication
|
||||
{
|
||||
log.info(LogManager.getHeader(context,
|
||||
"type=ldap-login", "type=ldap_but_already_email"));
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
eperson.setNetid(netid.toLowerCase());
|
||||
eperson.update();
|
||||
context.commit();
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
context.setCurrentUser(eperson);
|
||||
|
||||
// assign user to groups based on ldap dn
|
||||
@@ -298,7 +298,7 @@ public class LDAPAuthentication
|
||||
// TEMPORARILY turn off authorisation
|
||||
try
|
||||
{
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
eperson = EPerson.create(context);
|
||||
if (StringUtils.isNotEmpty(email))
|
||||
{
|
||||
@@ -332,7 +332,7 @@ public class LDAPAuthentication
|
||||
}
|
||||
finally
|
||||
{
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
log.info(LogManager.getHeader(context, "authenticate",
|
||||
@@ -354,7 +354,7 @@ public class LDAPAuthentication
|
||||
}
|
||||
finally
|
||||
{
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -612,7 +612,7 @@ public class X509Authentication implements AuthenticationMethod
|
||||
"from=x.509, email=" + email));
|
||||
|
||||
// TEMPORARILY turn off authorisation
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
eperson = EPerson.create(context);
|
||||
eperson.setEmail(email);
|
||||
eperson.setCanLogIn(true);
|
||||
@@ -620,7 +620,7 @@ public class X509Authentication implements AuthenticationMethod
|
||||
eperson);
|
||||
eperson.update();
|
||||
context.commit();
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
context.setCurrentUser(eperson);
|
||||
setSpecialGroupsFlag(request, email);
|
||||
return SUCCESS;
|
||||
|
@@ -20,6 +20,7 @@ import org.dspace.eperson.Group;
|
||||
import org.dspace.storage.rdbms.DatabaseManager;
|
||||
import org.dspace.storage.rdbms.TableRow;
|
||||
import org.dspace.storage.rdbms.TableRowIterator;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
|
||||
/**
|
||||
* AuthorizeManager handles all authorization checks for DSpace. For better
|
||||
@@ -295,8 +296,43 @@ public class AuthorizeManager
|
||||
}
|
||||
}
|
||||
|
||||
// In case the dso is an bundle or bitstream we must ignore custom
|
||||
// policies if it does not belong to at least one installed item (see
|
||||
// DS-2614).
|
||||
// In case the dso is an item and a corresponding workspace or workflow
|
||||
// item exist, we have to ignore custom policies (see DS-2614).
|
||||
boolean ignoreCustomPolicies = false;
|
||||
if (o instanceof Bitstream)
|
||||
{
|
||||
Bitstream b = (Bitstream) o;
|
||||
|
||||
// Ensure that this is not a collection or community logo
|
||||
DSpaceObject parent = b.getParentObject();
|
||||
if (!(parent instanceof Collection) && !(parent instanceof Community))
|
||||
{
|
||||
ignoreCustomPolicies = !isAnyItemInstalled(c, b.getBundles());
|
||||
}
|
||||
}
|
||||
if (o instanceof Bundle)
|
||||
{
|
||||
ignoreCustomPolicies = !isAnyItemInstalled(c, new Bundle[] {(Bundle) o});
|
||||
}
|
||||
if (o instanceof Item)
|
||||
{
|
||||
if (WorkspaceItem.findByItem(c, (Item) o) != null ||
|
||||
WorkflowItem.findByItem(c, (Item) o) != null)
|
||||
{
|
||||
ignoreCustomPolicies = true;
|
||||
}
|
||||
}
|
||||
|
||||
for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action))
|
||||
{
|
||||
if (ignoreCustomPolicies
|
||||
&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
// check policies for date validity
|
||||
if (rp.isDateValid())
|
||||
{
|
||||
@@ -318,7 +354,26 @@ public class AuthorizeManager
|
||||
// default authorization is denial
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// check whether any bundle belongs to any item that passed submission
|
||||
// and workflow process
|
||||
protected static boolean isAnyItemInstalled(Context ctx, Bundle[] bundles)
|
||||
throws SQLException
|
||||
{
|
||||
for (Bundle bundle : bundles)
|
||||
{
|
||||
for (Item item : bundle.getItems())
|
||||
{
|
||||
if (WorkspaceItem.findByItem(ctx, item) == null
|
||||
&& WorkflowItem.findByItem(ctx, item) == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////
|
||||
// admin check methods
|
||||
///////////////////////////////////////////////
|
||||
@@ -480,7 +535,9 @@ public class AuthorizeManager
|
||||
|
||||
rp.update();
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -534,8 +591,10 @@ public class AuthorizeManager
|
||||
rp.setRpType(type);
|
||||
|
||||
rp.update();
|
||||
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -799,7 +858,9 @@ public class AuthorizeManager
|
||||
drp.update();
|
||||
}
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
dest.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -815,12 +876,14 @@ public class AuthorizeManager
|
||||
public static void removeAllPolicies(Context c, DSpaceObject o)
|
||||
throws SQLException
|
||||
{
|
||||
o.updateLastModified();
|
||||
|
||||
// FIXME: authorization check?
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? ",
|
||||
o.getType(), o.getID());
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -837,7 +900,7 @@ public class AuthorizeManager
|
||||
throws SQLException
|
||||
{
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? AND rptype <> ? ",
|
||||
+ "resource_type_id= ? AND resource_id= ? AND (rptype <> ? OR rptype IS NULL)",
|
||||
o.getType(), o.getID(), type);
|
||||
}
|
||||
|
||||
@@ -861,6 +924,29 @@ public class AuthorizeManager
|
||||
+ "resource_type_id= ? AND resource_id= ? AND rptype=? ",
|
||||
o.getType(), o.getID(), type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Change all the policies related to the action (fromPolicy) of the
|
||||
* specified object to the new action (toPolicy)
|
||||
*
|
||||
* @param context
|
||||
* @param dso
|
||||
* the dspace object
|
||||
* @param fromAction
|
||||
* the action to change
|
||||
* @param toAction
|
||||
* the new action to set
|
||||
* @throws SQLException
|
||||
* @throws AuthorizeException
|
||||
*/
|
||||
public static void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
|
||||
throws SQLException, AuthorizeException {
|
||||
List<ResourcePolicy> rps = getPoliciesActionFilter(context, dso, fromAction);
|
||||
for (ResourcePolicy rp : rps) {
|
||||
rp.setAction(toAction);
|
||||
rp.update();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all policies from an object that match a given action. FIXME
|
||||
@@ -879,7 +965,6 @@ public class AuthorizeManager
|
||||
public static void removePoliciesActionFilter(Context context,
|
||||
DSpaceObject dso, int actionID) throws SQLException
|
||||
{
|
||||
dso.updateLastModified();
|
||||
if (actionID == -1)
|
||||
{
|
||||
// remove all policies from object
|
||||
@@ -891,6 +976,10 @@ public class AuthorizeManager
|
||||
"resource_id= ? AND action_id= ? ",
|
||||
dso.getType(), dso.getID(), actionID);
|
||||
}
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
dso.updateLastModified();
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -927,11 +1016,13 @@ public class AuthorizeManager
|
||||
public static void removeGroupPolicies(Context c, DSpaceObject o, Group g)
|
||||
throws SQLException
|
||||
{
|
||||
o.updateLastModified();
|
||||
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? AND epersongroup_id= ? ",
|
||||
o.getType(), o.getID(), g.getID());
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -950,10 +1041,13 @@ public class AuthorizeManager
|
||||
public static void removeEPersonPolicies(Context c, DSpaceObject o, EPerson e)
|
||||
throws SQLException
|
||||
{
|
||||
o.updateLastModified();
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? AND eperson_id= ? ",
|
||||
o.getType(), o.getID(), e.getID());
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -36,7 +36,7 @@ public class FixDefaultPolicies
|
||||
Context c = new Context();
|
||||
|
||||
// turn off authorization
|
||||
c.setIgnoreAuthorization(true);
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
//////////////////////
|
||||
// carnage begins here
|
||||
|
@@ -63,7 +63,7 @@ public class PolicySet
|
||||
Context c = new Context();
|
||||
|
||||
// turn off authorization
|
||||
c.setIgnoreAuthorization(true);
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
//////////////////////
|
||||
// carnage begins here
|
||||
|
@@ -354,7 +354,7 @@ public class BrowserScope
*/
public void setResultsPerPage(int resultsPerPage)
{
if (resultsPerPage > -1 || browseIndex.isTagCloudEnabled())
if (resultsPerPage > -1 || (browseIndex != null && browseIndex.isTagCloudEnabled()))
{
this.resultsPerPage = resultsPerPage;
}

@@ -32,6 +32,8 @@ import org.dspace.utils.DSpace;
|
||||
*
|
||||
* @author Andrea Bollini (CILEA)
|
||||
* @author Adán Román Ruiz at arvo.es (bugfix)
|
||||
* @author Panagiotis Koutsourakis (National Documentation Centre) (bugfix)
|
||||
* @author Kostas Stamatis (National Documentation Centre) (bugfix)
|
||||
*
|
||||
*/
|
||||
public class SolrBrowseDAO implements BrowseDAO
|
||||
@@ -336,6 +338,22 @@ public class SolrBrowseDAO implements BrowseDAO
|
||||
addStatusFilter(query);
|
||||
query.setMaxResults(0);
|
||||
query.addFilterQueries("search.resourcetype:" + Constants.ITEM);
|
||||
|
||||
// We need to take into account the fact that we may be in a subset of the items
|
||||
if (authority != null)
|
||||
{
|
||||
query.addFilterQueries("{!field f="+facetField + "_authority_filter}"
|
||||
+ authority);
|
||||
}
|
||||
else if (this.value != null && !valuePartial)
|
||||
{
|
||||
query.addFilterQueries("{!field f="+facetField + "_value_filter}" + this.value);
|
||||
}
|
||||
else if (valuePartial)
|
||||
{
|
||||
query.addFilterQueries("{!field f="+facetField + "_partial}" + this.value);
|
||||
}
|
||||
|
||||
if (isAscending)
|
||||
{
|
||||
query.setQuery("bi_"+column + "_sort" + ": [* TO \"" + value + "\"}");
|
||||
@@ -343,6 +361,7 @@ public class SolrBrowseDAO implements BrowseDAO
|
||||
else
|
||||
{
|
||||
query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]");
|
||||
query.addFilterQueries("-(bi_" + column + "_sort" + ":" + value + "*)");
|
||||
}
|
||||
boolean includeUnDiscoverable = itemsWithdrawn || !itemsDiscoverable;
|
||||
DiscoverResult resp = null;
|
||||
|
@@ -28,6 +28,7 @@ import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
|
||||
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.PreparedStatement;
|
||||
@@ -294,31 +295,48 @@ public class Collection extends DSpaceObject
|
||||
* @return the collections in the system
|
||||
* @throws SQLException
|
||||
*/
|
||||
public static Collection[] findAll(Context context) throws SQLException {
|
||||
public static Collection[] findAll(Context context) throws SQLException
|
||||
{
|
||||
TableRowIterator tri = null;
|
||||
try {
|
||||
String query = "SELECT c.* FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
|
||||
if(DatabaseManager.isOracle()){
|
||||
query += " ORDER BY cast(m.text_value as varchar2(128))";
|
||||
}else{
|
||||
query += " ORDER BY m.text_value";
|
||||
}
|
||||
List<Collection> collections = null;
|
||||
List<Serializable> params = new ArrayList<Serializable>();
|
||||
StringBuffer query = new StringBuffer(
|
||||
"SELECT c.*" +
|
||||
"FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m ON (" +
|
||||
"m.resource_id = c.collection_id AND " +
|
||||
"m.resource_type_id = ? AND " +
|
||||
"m.metadata_field_id = ?" +
|
||||
")"
|
||||
);
|
||||
|
||||
tri = DatabaseManager.query(context,
|
||||
query,
|
||||
Constants.COLLECTION,
|
||||
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID()
|
||||
);
|
||||
} catch (SQLException e) {
|
||||
log.error("Find all Collections - ",e);
|
||||
throw e;
|
||||
if (DatabaseManager.isOracle())
|
||||
{
|
||||
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
|
||||
}
|
||||
else
|
||||
{
|
||||
query.append(" ORDER BY m.text_value");
|
||||
}
|
||||
|
||||
List<Collection> collections = new ArrayList<Collection>();
|
||||
params.add(Constants.COLLECTION);
|
||||
params.add(
|
||||
MetadataField.findByElement(
|
||||
context,
|
||||
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
|
||||
"title",
|
||||
null
|
||||
).getFieldID()
|
||||
);
|
||||
|
||||
try
|
||||
{
|
||||
tri = DatabaseManager.query(
|
||||
context, query.toString(), params.toArray()
|
||||
);
|
||||
|
||||
collections = new ArrayList<Collection>();
|
||||
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
@@ -337,6 +355,11 @@ public class Collection extends DSpaceObject
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
log.error("Find all Collections - ", e);
|
||||
throw e;
|
||||
}
|
||||
finally
|
||||
{
|
||||
// close the TableRowIterator to free up resources
|
||||
@@ -363,31 +386,47 @@ public class Collection extends DSpaceObject
|
||||
public static Collection[] findAll(Context context, Integer limit, Integer offset) throws SQLException
|
||||
{
|
||||
TableRowIterator tri = null;
|
||||
try{
|
||||
String query = "SELECT c.* FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
|
||||
List<Collection> collections = null;
|
||||
List<Serializable> params = new ArrayList<Serializable>();
|
||||
StringBuffer query = new StringBuffer(
|
||||
"SELECT c.*" +
|
||||
"FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m ON (" +
|
||||
"m.resource_id = c.collection_id AND " +
|
||||
"m.resource_type_id = ? AND " +
|
||||
"m.metadata_field_id = ?" +
|
||||
")"
|
||||
);
|
||||
|
||||
if(DatabaseManager.isOracle()){
|
||||
query += " ORDER BY cast(m.text_value as varchar2(128))";
|
||||
}else{
|
||||
query += " ORDER BY m.text_value";
|
||||
}
|
||||
query += " limit ? offset ?";
|
||||
tri = DatabaseManager.query(context,
|
||||
query,
|
||||
Constants.COLLECTION,
|
||||
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID(),
|
||||
limit,
|
||||
offset
|
||||
);
|
||||
} catch (SQLException e) {
|
||||
log.error("Find all Collections offset/limit - ",e);
|
||||
throw e;
|
||||
if (DatabaseManager.isOracle())
|
||||
{
|
||||
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
|
||||
}
|
||||
List<Collection> collections = new ArrayList<Collection>();
|
||||
else
|
||||
{
|
||||
query.append(" ORDER BY m.text_value");
|
||||
}
|
||||
|
||||
params.add(Constants.COLLECTION);
|
||||
params.add(
|
||||
MetadataField.findByElement(
|
||||
context,
|
||||
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
|
||||
"title",
|
||||
null
|
||||
).getFieldID()
|
||||
);
|
||||
|
||||
DatabaseManager.applyOffsetAndLimit(query, params, offset, limit);
|
||||
|
||||
try
|
||||
{
|
||||
tri = DatabaseManager.query(
|
||||
context, query.toString(), params.toArray()
|
||||
);
|
||||
|
||||
collections = new ArrayList<Collection>();
|
||||
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
@@ -406,6 +445,11 @@ public class Collection extends DSpaceObject
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
log.error("Find all Collections offset/limit - ", e);
|
||||
throw e;
|
||||
}
|
||||
finally
|
||||
{
|
||||
// close the TableRowIterator to free up resources
|
||||
@@ -450,13 +494,20 @@ public class Collection extends DSpaceObject
|
||||
*/
|
||||
public ItemIterator getItems(Integer limit, Integer offset) throws SQLException
|
||||
{
|
||||
String myQuery = "SELECT item.* FROM item, collection2item WHERE "
|
||||
+ "item.item_id=collection2item.item_id AND "
|
||||
+ "collection2item.collection_id= ? "
|
||||
+ "AND item.in_archive='1' limit ? offset ?";
|
||||
List<Serializable> params = new ArrayList<Serializable>();
|
||||
StringBuffer myQuery = new StringBuffer(
|
||||
"SELECT item.* " +
|
||||
"FROM item, collection2item " +
|
||||
"WHERE item.item_id = collection2item.item_id " +
|
||||
"AND collection2item.collection_id = ? " +
|
||||
"AND item.in_archive = '1'"
|
||||
);
|
||||
|
||||
TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item",
|
||||
myQuery,getID(), limit, offset);
|
||||
params.add(getID());
|
||||
DatabaseManager.applyOffsetAndLimit(myQuery, params, offset, limit);
|
||||
|
||||
TableRowIterator rows = DatabaseManager.query(ourContext,
|
||||
myQuery.toString(), params.toArray());
|
||||
|
||||
return new ItemIterator(ourContext, rows);
|
||||
}
|
||||
@@ -1513,7 +1564,7 @@ public class Collection extends DSpaceObject
|
||||
|
||||
public static Collection[] findAuthorizedOptimized(Context context, int actionID) throws java.sql.SQLException
|
||||
{
|
||||
if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", true)) {
|
||||
if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false)) {
|
||||
// Fallback to legacy query if config says so. The rationale could be that a site found a bug.
|
||||
return findAuthorized(context, null, actionID);
|
||||
}
|
||||
|
@@ -60,6 +60,8 @@ public class InstallItem
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
Item item = is.getItem();
|
||||
Collection collection = is.getCollection();
|
||||
|
||||
IdentifierService identifierService = new DSpace().getSingletonService(IdentifierService.class);
|
||||
try {
|
||||
if(suppliedHandle == null)
|
||||
@@ -74,7 +76,15 @@ public class InstallItem
|
||||
|
||||
populateMetadata(c, item);
|
||||
|
||||
return finishItem(c, item, is);
|
||||
// Finish up / archive the item
|
||||
item = finishItem(c, item, is);
|
||||
|
||||
// As this is a BRAND NEW item, as a final step we need to remove the
|
||||
// submitter item policies created during deposit and replace them with
|
||||
// the default policies from the collection.
|
||||
item.inheritCollectionDefaultPolicies(collection);
|
||||
|
||||
return item;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -205,8 +215,18 @@ public class InstallItem
|
||||
item.addDC("description", "provenance", "en", provDescription);
|
||||
}
|
||||
|
||||
// final housekeeping when adding new Item to archive
|
||||
// common between installing and "restoring" items.
|
||||
/**
|
||||
* Final housekeeping when adding a new Item into the archive.
|
||||
* This method is used by *both* installItem() and restoreItem(),
|
||||
* so all actions here will be run for a newly added item or a restored item.
|
||||
*
|
||||
* @param c DSpace Context
|
||||
* @param item Item in question
|
||||
* @param is InProgressSubmission object
|
||||
* @return final "archived" Item
|
||||
* @throws SQLException if database error
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
private static Item finishItem(Context c, Item item, InProgressSubmission is)
|
||||
throws SQLException, IOException, AuthorizeException
|
||||
{
|
||||
@@ -229,10 +249,6 @@ public class InstallItem
|
||||
// remove in-progress submission
|
||||
is.deleteWrapper();
|
||||
|
||||
// remove the item's policies and replace them with
|
||||
// the defaults from the collection
|
||||
item.inheritCollectionDefaultPolicies(is.getCollection());
|
||||
|
||||
// set embargo lift date and take away read access if indicated.
|
||||
EmbargoManager.setEmbargo(c, item);
|
||||
|
||||
|
@@ -24,14 +24,15 @@ import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.browse.BrowseException;
|
||||
import org.dspace.browse.IndexBrowse;
|
||||
import org.dspace.content.authority.ChoiceAuthorityManager;
|
||||
import org.dspace.content.authority.Choices;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.content.authority.Choices;
|
||||
import org.dspace.content.authority.ChoiceAuthorityManager;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.handle.HandleManager;
|
||||
import org.dspace.identifier.IdentifierException;
|
||||
import org.dspace.identifier.IdentifierService;
|
||||
@@ -40,6 +41,8 @@ import org.dspace.storage.rdbms.TableRow;
|
||||
import org.dspace.storage.rdbms.TableRowIterator;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.dspace.versioning.VersioningService;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
|
||||
|
||||
/**
|
||||
* Class representing an item in DSpace.
|
||||
@@ -263,7 +266,7 @@ public class Item extends DSpaceObject
|
||||
}
|
||||
|
||||
String query = "SELECT item.* FROM metadatavalue,item WHERE item.in_archive='1' " +
|
||||
"AND item.item_id = metadatavalue.item_id AND metadata_field_id = ?";
|
||||
"AND item.item_id = metadatavalue.resource_id AND metadatavalue.resource_type_id=2 AND metadata_field_id = ?";
|
||||
TableRowIterator rows = null;
if (Item.ANY.equals(authority)) {
rows = DatabaseManager.queryTable(context, "item", query, mdf.getFieldID());
@@ -1158,8 +1161,14 @@ public class Item extends DSpaceObject
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
"WITHDRAW", getIdentifiers(ourContext)));

// remove all authorization policies, saving the custom ones
AuthorizeManager.removeAllPoliciesByDSOAndTypeNotEqualsTo(ourContext, this, ResourcePolicy.TYPE_CUSTOM);
// switch all READ authorization policies to WITHDRAWN_READ
AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.READ, Constants.WITHDRAWN_READ);
for (Bundle bnd : this.getBundles()) {
AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.READ, Constants.WITHDRAWN_READ);
for (Bitstream bs : bnd.getBitstreams()) {
AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.READ, Constants.WITHDRAWN_READ);
}
}

// Write log
log.info(LogManager.getHeader(ourContext, "withdraw_item", "user="
@@ -1217,16 +1226,28 @@ public class Item extends DSpaceObject
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
"REINSTATE", getIdentifiers(ourContext)));

// authorization policies
if (colls.length > 0)
{
// FIXME: not multiple inclusion friendly - just apply access
// policies from first collection
// remove the item's policies and replace them with
// the defaults from the collection
inheritCollectionDefaultPolicies(colls[0]);
// restore all WITHDRAWN_READ authorization policies back to READ
for (Bundle bnd : this.getBundles()) {
AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.WITHDRAWN_READ, Constants.READ);
for (Bitstream bs : bnd.getBitstreams()) {
AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.WITHDRAWN_READ, Constants.READ);
}
}

// check if the item was withdrawn before the fix DS-3097
if (AuthorizeManager.getPoliciesActionFilter(ourContext, this, Constants.WITHDRAWN_READ).size() != 0) {
AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.WITHDRAWN_READ, Constants.READ);
}

else {
// authorization policies
if (colls.length > 0)
{
// remove the item's policies and replace them with
// the defaults from the collection
adjustItemPolicies(getOwningCollection());
}
}

// Write log
log.info(LogManager.getHeader(ourContext, "reinstate_item", "user="
+ e.getEmail() + ",item_id=" + getID()));
@@ -1750,7 +1771,12 @@ public class Item extends DSpaceObject
// is this collection not yet created, and an item template is created
if (getOwningCollection() == null)
{
return true;
if (!isInProgressSubmission()) {
return true;
}
else {
return false;
}
}

// is this person an COLLECTION_EDITOR for the owning collection?
@@ -1762,6 +1788,20 @@ public class Item extends DSpaceObject
return false;
}

/**
* Check if the item is an inprogress submission
* @param context
* @param item
* @return <code>true</code> if the item is an inprogress submission, i.e. a WorkspaceItem or WorkflowItem
* @throws SQLException
*/
public boolean isInProgressSubmission() throws SQLException {
return WorkspaceItem.findByItem(ourContext, this) != null ||
((ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow")
&& XmlWorkflowItem.findByItem(ourContext, this) != null)
|| WorkflowItem.findByItem(ourContext, this) != null);
}

public String getName()
{
return getMetadataFirstValue(MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
@@ -27,7 +27,10 @@ import org.dspace.license.CreativeCommons;
*
* @author Larry Stone
* @version $Revision: 1.0 $
*
* @deprecated to make uniforme JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored see https://jira.duraspace.org/browse/DS-2604
*/
@Deprecated
public class CreativeCommonsTextStreamDisseminationCrosswalk
implements StreamDisseminationCrosswalk
{
@@ -17,7 +17,6 @@ import java.util.Map;
|
||||
import java.util.Date;
|
||||
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.commons.lang.ArrayUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
@@ -435,134 +434,81 @@ public class METSRightsCrosswalk
|
||||
public void ingest(Context context, DSpaceObject dso, List<Element> ml)
|
||||
throws CrosswalkException, IOException, SQLException, AuthorizeException
|
||||
{
|
||||
// we cannot crosswalk METSRights to a SITE object
|
||||
// SITE objects are not supported by the METSRightsCrosswalk
|
||||
if (dso.getType() == Constants.SITE)
|
||||
{
|
||||
throw new CrosswalkObjectNotSupported("Wrong target object type, METSRightsCrosswalk cannot crosswalk a SITE object.");
|
||||
}
|
||||
|
||||
//First, clear all existing Policies on this DSpace Object
|
||||
// as we don't want them to conflict with policies we will be adding
|
||||
if(!ml.isEmpty())
|
||||
// If we're fed the top-level <RightsDeclarationMD> wrapper element, recurse into its guts.
|
||||
// What we need to analyze are the <Context> elements underneath it.
|
||||
if(!ml.isEmpty() && ml.get(0).getName().equals("RightsDeclarationMD"))
|
||||
{
|
||||
AuthorizeManager.removeAllPolicies(context, dso);
|
||||
ingest(context, dso, ml.get(0).getChildren());
|
||||
}
|
||||
|
||||
// Loop through each Element in the List
|
||||
List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
|
||||
for (Element element : ml)
|
||||
else
|
||||
{
|
||||
// if we're fed a <RightsDeclarationMD> wrapper object, recurse on its guts:
|
||||
if (element.getName().equals("RightsDeclarationMD"))
|
||||
// Loop through each <Context> Element in the passed in List, creating a ResourcePolicy for each
|
||||
List<ResourcePolicy> policies = new ArrayList<>();
|
||||
for (Element element : ml)
|
||||
{
|
||||
ingest(context, dso, element.getChildren());
|
||||
}
|
||||
// "Context" section (where permissions are stored)
|
||||
else if (element.getName().equals("Context"))
|
||||
{
|
||||
//get what class of context this is
|
||||
String contextClass = element.getAttributeValue("CONTEXTCLASS");
|
||||
|
||||
if ((element.getAttributeValue("start-date") != null)
|
||||
|| (element.getAttributeValue("end-date") != null)
|
||||
|| (element.getAttributeValue("rpName") != null))
|
||||
{
|
||||
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
|
||||
try {
|
||||
ResourcePolicy rp = ResourcePolicy.create(context);
|
||||
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("GENERAL PUBLIC")) {
|
||||
Group anonGroup = Group.find(context, 0);
|
||||
rp.setGroup(anonGroup);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("REPOSITORY MGR")) {
|
||||
Group adminGroup = Group.find(context, 1);
|
||||
rp.setGroup(adminGroup);
|
||||
}
|
||||
}
|
||||
if (element.getAttributeValue("rpName") != null)
|
||||
{
|
||||
rp.setRpName(element.getAttributeValue("rpName"));
|
||||
}
|
||||
try {
|
||||
if (element.getAttributeValue("start-date") != null)
|
||||
{
|
||||
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
|
||||
}
|
||||
if (element.getAttributeValue("end-date") != null)
|
||||
{
|
||||
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
|
||||
}
|
||||
}catch (ParseException ex) {
|
||||
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
|
||||
List<Element> le = new ArrayList<Element>(element.getChildren());
|
||||
for (Element el : le)
|
||||
{
|
||||
if ((el.getAttributeValue("DISCOVER").equalsIgnoreCase("true"))
|
||||
&& (el.getAttributeValue("DISPLAY").equalsIgnoreCase("true")))
|
||||
{
|
||||
if (el.getAttributeValue("DELETE").equalsIgnoreCase("false"))
|
||||
{
|
||||
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("false"))
|
||||
{
|
||||
rp.setAction(Constants.READ);
|
||||
}
|
||||
else
|
||||
{
|
||||
rp.setAction(Constants.WRITE);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("true"))
|
||||
{
|
||||
rp.setAction(Constants.DELETE);
|
||||
if ((el.getAttributeValue("COPY").equalsIgnoreCase("true"))
|
||||
&&(el.getAttributeValue("DUPLICATE").equalsIgnoreCase("true"))
|
||||
&&(el.getAttributeValue("PRINT").equalsIgnoreCase("true")))
|
||||
{
|
||||
rp.setAction(Constants.ADMIN);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
policies.add(rp);
|
||||
} catch (NullPointerException ex) {
|
||||
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
assignPermissions(context, dso, policies);
|
||||
}
|
||||
else
|
||||
// Must be a "Context" section (where permissions are stored)
|
||||
if (element.getName().equals("Context"))
|
||||
{
|
||||
//also get reference to the <Permissions> element
|
||||
//get what class of context this is
|
||||
String contextClass = element.getAttributeValue("CONTEXTCLASS");
|
||||
|
||||
ResourcePolicy rp = ResourcePolicy.create(context);
|
||||
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
|
||||
|
||||
// get reference to the <Permissions> element
|
||||
// Note: we are assuming here that there will only ever be ONE <Permissions>
|
||||
// element. Currently there are no known use cases for multiple.
|
||||
Element permsElement = element.getChild("Permissions", METSRights_NS);
|
||||
if(permsElement == null) {
|
||||
log.error("No <Permissions> element was found. Skipping this <Context> element.");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (element.getAttributeValue("rpName") != null)
|
||||
{
|
||||
rp.setRpName(element.getAttributeValue("rpName"));
|
||||
}
|
||||
try {
|
||||
if (element.getAttributeValue("start-date") != null)
|
||||
{
|
||||
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
|
||||
}
|
||||
if (element.getAttributeValue("end-date") != null)
|
||||
{
|
||||
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
|
||||
}
|
||||
}catch (ParseException ex) {
|
||||
log.error("Failed to parse embargo date. The date needs to be in the format 'yyyy-MM-dd'.", ex);
|
||||
}
|
||||
|
||||
//Check if this permission pertains to Anonymous users
|
||||
if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
//get DSpace Anonymous group, ID=0
|
||||
Group anonGroup = Group.find(context, 0);
|
||||
Group anonGroup = Group.find(context, Group.ANONYMOUS_ID);
|
||||
if(anonGroup==null)
|
||||
{
|
||||
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
|
||||
}
|
||||
|
||||
assignPermissions(context, dso, anonGroup, permsElement);
|
||||
rp.setGroup(anonGroup);
|
||||
} // else if this permission declaration pertains to Administrators
|
||||
else if(ADMIN_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
//get DSpace Administrator group, ID=1
|
||||
Group adminGroup = Group.find(context, 1);
|
||||
Group adminGroup = Group.find(context, Group.ADMIN_ID);
|
||||
if(adminGroup==null)
|
||||
{
|
||||
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
|
||||
}
|
||||
|
||||
assignPermissions(context, dso, adminGroup, permsElement);
|
||||
rp.setGroup(adminGroup);
|
||||
} // else if this permission pertains to another DSpace group
|
||||
else if(GROUP_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
@@ -591,8 +537,8 @@ public class METSRightsCrosswalk
|
||||
+ "Please restore this group using the SITE AIP, or recreate it.");
|
||||
}
|
||||
|
||||
//assign permissions to group on this object
|
||||
assignPermissions(context, dso, group, permsElement);
|
||||
//assign group to policy
|
||||
rp.setGroup(group);
|
||||
}
|
||||
catch(PackageException pe)
|
||||
{
|
||||
@@ -600,7 +546,7 @@ public class METSRightsCrosswalk
|
||||
//We'll just wrap it as a CrosswalkException and throw it upwards
|
||||
throw new CrosswalkException(pe);
|
||||
}
|
||||
}//end if Group
|
||||
}// else if this permission pertains to a DSpace person
|
||||
else if(PERSON_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
//we need to find the person it pertains to
|
||||
@@ -629,89 +575,26 @@ public class METSRightsCrosswalk
|
||||
+ "Please restore this Person object using the SITE AIP, or recreate it.");
|
||||
}
|
||||
|
||||
//assign permissions to person on this object
|
||||
assignPermissions(context, dso, person, permsElement);
|
||||
//assign person to the policy
|
||||
rp.setEPerson(person);
|
||||
}//end if Person
|
||||
else
|
||||
else {
|
||||
log.error("Unrecognized CONTEXTCLASS: " + contextClass);
|
||||
}
|
||||
} //end if "Context" element
|
||||
}//end while loop
|
||||
}
|
||||
}
|
||||
|
||||
//set permissions on policy add to list of policies
|
||||
rp.setAction(parsePermissions(permsElement));
|
||||
policies.add(rp);
|
||||
} //end if "Context" element
|
||||
}//end for loop
|
||||
|
||||
/**
|
||||
* Parses the 'permsElement' (corresponding to a <code>Permissions</code>
|
||||
* element), and assigns those permissions to the specified Group
|
||||
* on the specified DSpace Object.
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param dso The DSpace Object
|
||||
* @param group The DSpace Group
|
||||
* @param permsElement The METSRights <code>Permissions</code> element
|
||||
*/
|
||||
private void assignPermissions(Context context, DSpaceObject dso, List<ResourcePolicy> policies)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
AuthorizeManager.removeAllPolicies(context, dso);
|
||||
if (policies == null){
|
||||
throw new AuthorizeException("Policies are null");
|
||||
}
|
||||
else{
|
||||
// Finally, we need to remove any existing policies from the current object,
|
||||
// and replace them with the policies provided via METSRights. NOTE:
|
||||
// if the list of policies provided by METSRights is an empty list, then
|
||||
// the final object will have no policies attached.
|
||||
AuthorizeManager.removeAllPolicies(context, dso);
|
||||
AuthorizeManager.addPolicies(context, policies, dso);
|
||||
}
|
||||
}
|
||||
|
||||
private void assignPermissions(Context context, DSpaceObject dso, Group group, Element permsElement)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
//first, parse our permissions to determine which action we are allowing in DSpace
|
||||
int actionID = parsePermissions(permsElement);
|
||||
|
||||
//If action ID is less than base READ permissions (value=0),
|
||||
// then something must've gone wrong in the parsing
|
||||
if(actionID < Constants.READ)
|
||||
{
|
||||
log.warn("Unable to properly restore all access permissions on object ("
|
||||
+ "type=" + Constants.typeText[dso.getType()] + ", "
|
||||
+ "handle=" + dso.getHandle() + ", "
|
||||
+ "ID=" + dso.getID()
|
||||
+ ") for group '" + group.getName() + "'.");
|
||||
}
|
||||
|
||||
//Otherwise, add the appropriate group policy for this object
|
||||
AuthorizeManager.addPolicy(context, dso, actionID, group);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the 'permsElement' (corresponding to a <code>Permissions</code>
|
||||
* element), and assigns those permissions to the specified EPerson
|
||||
* on the specified DSpace Object.
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param dso The DSpace Object
|
||||
* @param person The DSpace EPerson
|
||||
* @param permsElement The METSRights <code>Permissions</code> element
|
||||
*/
|
||||
private void assignPermissions(Context context, DSpaceObject dso, EPerson person, Element permsElement)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
//first, parse our permissions to determine which action we are allowing in DSpace
|
||||
int actionID = parsePermissions(permsElement);
|
||||
|
||||
//If action ID is less than base READ permissions (value=0),
|
||||
// then something must've gone wrong in the parsing
|
||||
if(actionID < Constants.READ)
|
||||
{
|
||||
log.warn("Unable to properly restore all access permissions on object ("
|
||||
+ "type=" + Constants.typeText[dso.getType()] + ", "
|
||||
+ "handle=" + dso.getHandle() + ", "
|
||||
+ "ID=" + dso.getID()
|
||||
+ ") for person '" + person.getEmail() + "'.");
|
||||
}
|
||||
|
||||
//Otherwise, add the appropriate EPerson policy for this object
|
||||
AuthorizeManager.addPolicy(context, dso, actionID, person);
|
||||
} // end else
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -21,15 +21,7 @@ import java.util.zip.ZipFile;

import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.*;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.ConfigurationManager;
@@ -37,6 +29,8 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.jdom.Element;

/**
@@ -324,18 +318,18 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
}
else
{
ZipFile zip = new ZipFile(pkgFile);
try(ZipFile zip = new ZipFile(pkgFile))
{
// Retrieve the manifest file entry (named mets.xml)
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);

// Retrieve the manifest file entry (named mets.xml)
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);

// parse the manifest and sanity-check it.
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
validate, getConfigurationName());

// close the Zip file for now
// (we'll extract the other files from zip when we need them)
zip.close();
if(manifestEntry!=null)
{
// parse the manifest and sanity-check it.
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
validate, getConfigurationName());
}
}
}

// return our parsed out METS manifest
@@ -660,8 +654,24 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
addBitstreams(context, item, manifest, pkgFile, params, callback);

// have subclass manage license since it may be extra package file.
addLicense(context, item, license, (Collection) dso
.getParentObject(), params);
Collection owningCollection = (Collection) dso.getParentObject();
if(owningCollection == null)
{
//We are probably dealing with an item that isn't archived yet
InProgressSubmission inProgressSubmission = WorkspaceItem.findByItem(context, item);
if(inProgressSubmission == null)
{
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow"))
{
inProgressSubmission = XmlWorkflowItem.findByItem(context, item);
}else{
inProgressSubmission = WorkflowItem.findByItem(context, item);
}
}
owningCollection = inProgressSubmission.getCollection();
}

addLicense(context, item, license, owningCollection, params);

// FIXME ?
// should set lastModifiedTime e.g. when ingesting AIP.
@@ -127,6 +127,8 @@ public class Constants
*/
public static final int ADMIN = 11;

public static final int WITHDRAWN_READ = 12;

/** Position of front page news item -- top box */
public static final int NEWS_TOP = 0;

@@ -139,7 +141,7 @@ public class Constants
public static final String[] actionText = { "READ", "WRITE",
"OBSOLETE (DELETE)", "ADD", "REMOVE", "WORKFLOW_STEP_1",
"WORKFLOW_STEP_2", "WORKFLOW_STEP_3", "WORKFLOW_ABORT",
"DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN" };
"DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN", "WITHDRAWN_READ" };

/**
* generating constants for the relevance array dynamically is simple: just
@@ -175,7 +177,9 @@ public class Constants
0, // 8 - WORKFLOW_ABORT
RCOLLECTION, // 9 - DEFAULT_BITSTREAM_READ
RCOLLECTION, // 10 - DEFAULT_ITEM_READ
RITEM | RCOLLECTION | RCOMMUNITY // 11 - ADMIN
RITEM | RCOLLECTION | RCOMMUNITY, // 11 - ADMIN
RBITSTREAM | RBUNDLE | RITEM // 12 - WITHDRAWN_READ

};

public static final String DEFAULT_ENCODING = "UTF-8";
@@ -474,8 +474,18 @@ public class Email
System.out.println(" - To: " + to);
System.out.println(" - Subject: " + subject);
System.out.println(" - Server: " + server);
boolean disabled = ConfigurationManager.getBooleanProperty("mail.server.disabled", false);
try
{
if( disabled)
{
System.err.println("\nError sending email:");
System.err.println(" - Error: cannot test email because mail.server.disabled is set to true");
System.err.println("\nPlease see the DSpace documentation for assistance.\n");
System.err.println("\n");
System.exit(1);
return;
}
e.send();
}
catch (MessagingException me)
@@ -16,6 +16,9 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;

import org.dspace.core.service.NewsService;
import org.dspace.utils.DSpace;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -39,6 +42,10 @@ public class NewsManager
*/
public static String readNewsFile(String newsFile)
{
NewsService newsService = new DSpace().getSingletonService(NewsService.class);
if (!newsService.validate(newsFile)) {
throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
}
String fileName = getNewsFilePath();

fileName += newsFile;
@@ -81,6 +88,10 @@ public class NewsManager
*/
public static String writeNewsFile(String newsFile, String news)
{
NewsService newsService = new DSpace().getSingletonService(NewsService.class);
if (!newsService.validate(newsFile)) {
throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
}
String fileName = getNewsFilePath();

fileName += newsFile;
@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core;

import java.util.List;

import org.dspace.core.service.NewsService;

public class NewsServiceImpl implements NewsService {
private List<String> acceptableFilenames;

public void setAcceptableFilenames(List<String> acceptableFilenames) {
this.acceptableFilenames = acceptableFilenames;
}

@Override
public boolean validate(String newsName) {
if (acceptableFilenames != null) {
return acceptableFilenames.contains(newsName);
}
return false;
}

}
@@ -0,0 +1,12 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core.service;

public interface NewsService {
boolean validate(String newsName);
}
@@ -164,7 +164,7 @@ public class CurationCli
}
else
{
c.setIgnoreAuthorization(true);
c.turnOffAuthorisationSystem();
}

Curator curator = new Curator();
@@ -218,10 +218,20 @@ public class DiscoverQuery {
this.facetOffset = facetOffset;
}

/**
* Sets the fields which you want Discovery to return in the search results.
* It is HIGHLY recommended to limit the fields returned, as by default
* some backends (like Solr) will return everything.
* @param field field to add to the list of fields returned
*/
public void addSearchField(String field){
this.searchFields.add(field);
}

/**
* Get list of fields which Discovery will return in the search results
* @return List of field names
*/
public List<String> getSearchFields() {
return searchFields;
}
@@ -39,7 +39,7 @@ public class IndexClient {
public static void main(String[] args) throws SQLException, IOException, SearchServiceException {

Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

String usage = "org.dspace.discovery.IndexClient [-cbhf[r <item handle>]] or nothing to update/clean an existing index.";
Options options = new Options();
@@ -113,4 +113,11 @@ public interface SearchService {
* @return the indexed field
*/
String toSortFieldIndex(String metadataField, String type);

/**
* Utility method to escape any special characters in a user's query
* @param query
* @return query with any special characters escaped
*/
String escapeQueryChars(String query);
}
@@ -119,6 +119,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
private static final Logger log = Logger.getLogger(SolrServiceImpl.class);
|
||||
|
||||
protected static final String LAST_INDEXED_FIELD = "SolrIndexer.lastIndexed";
|
||||
protected static final String HANDLE_FIELD = "handle";
|
||||
protected static final String RESOURCE_TYPE_FIELD = "search.resourcetype";
|
||||
protected static final String RESOURCE_ID_FIELD = "search.resourceid";
|
||||
|
||||
public static final String FILTER_SEPARATOR = "\n|||\n";
|
||||
|
||||
@@ -149,9 +152,11 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
solr.setBaseURL(solrService);
|
||||
solr.setUseMultiPartPost(true);
|
||||
// Dummy/test query to search for Item (type=2) of ID=1
|
||||
SolrQuery solrQuery = new SolrQuery()
|
||||
.setQuery("search.resourcetype:2 AND search.resourceid:1");
|
||||
|
||||
.setQuery(RESOURCE_TYPE_FIELD + ":2 AND " + RESOURCE_ID_FIELD + ":1");
|
||||
// Only return obj identifier fields in result doc
|
||||
solrQuery.setFields(RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
|
||||
solr.query(solrQuery);
|
||||
|
||||
// As long as Solr initialized, check with DatabaseUtils to see
|
||||
@@ -323,7 +328,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
try {
|
||||
if(getSolr() != null){
|
||||
getSolr().deleteByQuery("handle:\"" + handle + "\"");
|
||||
getSolr().deleteByQuery(HANDLE_FIELD + ":\"" + handle + "\"");
|
||||
if(commit)
|
||||
{
|
||||
getSolr().commit();
|
||||
@@ -462,10 +467,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
if (force)
|
||||
{
|
||||
getSolr().deleteByQuery("search.resourcetype:[2 TO 4]");
|
||||
getSolr().deleteByQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
|
||||
} else {
|
||||
SolrQuery query = new SolrQuery();
|
||||
query.setQuery("search.resourcetype:[2 TO 4]");
|
||||
// Query for all indexed Items, Collections and Communities,
|
||||
// returning just their handle
|
||||
query.setFields(HANDLE_FIELD);
|
||||
query.setQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
|
||||
QueryResponse rsp = getSolr().query(query);
|
||||
SolrDocumentList docs = rsp.getResults();
|
||||
|
||||
@@ -475,7 +483,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
SolrDocument doc = (SolrDocument) iter.next();
|
||||
|
||||
String handle = (String) doc.getFieldValue("handle");
|
||||
String handle = (String) doc.getFieldValue(HANDLE_FIELD);
|
||||
|
||||
DSpaceObject o = HandleManager.resolveToObject(context, handle);
|
||||
|
||||
@@ -616,7 +624,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
boolean inIndex = false;
|
||||
|
||||
SolrQuery query = new SolrQuery();
|
||||
query.setQuery("handle:" + handle);
|
||||
query.setQuery(HANDLE_FIELD + ":" + handle);
|
||||
// Specify that we ONLY want the LAST_INDEXED_FIELD returned in the field list (fl)
|
||||
query.setFields(LAST_INDEXED_FIELD);
|
||||
QueryResponse rsp;
|
||||
|
||||
try {
|
||||
@@ -1444,9 +1454,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
// New fields to weaken the dependence on handles, and allow for faster
|
||||
// list display
|
||||
doc.addField("search.uniqueid", type+"-"+id);
|
||||
doc.addField("search.resourcetype", Integer.toString(type));
|
||||
doc.addField(RESOURCE_TYPE_FIELD, Integer.toString(type));
|
||||
|
||||
doc.addField("search.resourceid", Integer.toString(id));
|
||||
doc.addField(RESOURCE_ID_FIELD, Integer.toString(id));
|
||||
|
||||
// want to be able to search for handle, so use keyword
|
||||
// (not tokenized, but it is indexed)
|
||||
@@ -1454,7 +1464,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
{
|
||||
// want to be able to search for handle, so use keyword
|
||||
// (not tokenized, but it is indexed)
|
||||
doc.addField("handle", handle);
|
||||
doc.addField(HANDLE_FIELD, handle);
|
||||
}
|
||||
|
||||
if (locations != null)
|
||||
@@ -1584,7 +1594,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
discoveryQuery.addFilterQueries("location:l" + dso.getID());
|
||||
} else if (dso instanceof Item)
|
||||
{
|
||||
discoveryQuery.addFilterQueries("handle:" + dso.getHandle());
|
||||
discoveryQuery.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
|
||||
}
|
||||
}
|
||||
return search(context, discoveryQuery, includeUnDiscoverable);
|
||||
@@ -1620,6 +1630,18 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
|
||||
solrQuery.setQuery(query);
|
||||
|
||||
// Add any search fields to our query. This is the limited list
|
||||
// of fields that will be returned in the solr result
|
||||
for(String fieldName : discoveryQuery.getSearchFields())
|
||||
{
|
||||
solrQuery.addField(fieldName);
|
||||
}
|
||||
// Also ensure a few key obj identifier fields are returned with every query
|
||||
solrQuery.addField(HANDLE_FIELD);
|
||||
solrQuery.addField(RESOURCE_TYPE_FIELD);
|
||||
solrQuery.addField(RESOURCE_ID_FIELD);
|
||||
|
||||
if(discoveryQuery.isSpellCheck())
|
||||
{
|
||||
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
|
||||
@@ -1640,7 +1662,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
if(discoveryQuery.getDSpaceObjectFilter() != -1)
|
||||
{
|
||||
solrQuery.addFilterQuery("search.resourcetype:" + discoveryQuery.getDSpaceObjectFilter());
|
||||
solrQuery.addFilterQuery(RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter());
|
||||
}
|
||||
|
||||
for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++)
|
||||
@@ -1753,7 +1775,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
query.addFilterQueries("location:l" + dso.getID());
|
||||
} else if (dso instanceof Item)
|
||||
{
|
||||
query.addFilterQueries("handle:" + dso.getHandle());
|
||||
query.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
|
||||
}
|
||||
}
|
||||
return searchJSON(context, query, jsonIdentifier);
|
||||
@@ -1807,7 +1829,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
{
|
||||
result.addDSpaceObject(dso);
|
||||
} else {
|
||||
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue("handle")));
|
||||
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue(HANDLE_FIELD)));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -1926,9 +1948,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
protected static DSpaceObject findDSpaceObject(Context context, SolrDocument doc) throws SQLException {
|
||||
|
||||
Integer type = (Integer) doc.getFirstValue("search.resourcetype");
|
||||
Integer id = (Integer) doc.getFirstValue("search.resourceid");
|
||||
String handle = (String) doc.getFirstValue("handle");
|
||||
Integer type = (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD);
|
||||
Integer id = (Integer) doc.getFirstValue(RESOURCE_ID_FIELD);
|
||||
String handle = (String) doc.getFirstValue(HANDLE_FIELD);
|
||||
|
||||
if (type != null && id != null)
|
||||
{
|
||||
@@ -1981,7 +2003,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setQuery(query);
|
||||
solrQuery.setFields("search.resourceid", "search.resourcetype");
|
||||
//Only return obj identifier fields in result doc
|
||||
solrQuery.setFields(RESOURCE_ID_FIELD, RESOURCE_TYPE_FIELD);
|
||||
solrQuery.setStart(offset);
|
||||
solrQuery.setRows(max);
|
||||
if (orderfield != null)
|
||||
@@ -2001,7 +2024,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
{
|
||||
SolrDocument doc = (SolrDocument) iter.next();
|
||||
|
||||
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue("search.resourcetype"), (Integer) doc.getFirstValue("search.resourceid"));
|
||||
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD), (Integer) doc.getFirstValue(RESOURCE_ID_FIELD));
|
||||
|
||||
if (o != null)
|
||||
{
|
||||
@@ -2089,7 +2112,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
try{
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
//Set the query to handle since this is unique
|
||||
solrQuery.setQuery("handle: " + item.getHandle());
|
||||
solrQuery.setQuery(HANDLE_FIELD + ": " + item.getHandle());
|
||||
//Only return obj identifier fields in result doc
|
||||
solrQuery.setFields(HANDLE_FIELD, RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
|
||||
//Add the more like this parameters !
|
||||
solrQuery.setParam(MoreLikeThisParams.MLT, true);
|
||||
//Add a comma separated list of the similar fields
|
||||
@@ -2320,4 +2345,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
throw new SearchServiceException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String escapeQueryChars(String query) {
|
||||
// Use Solr's built in query escape tool
|
||||
// WARNING: You should only escape characters from user entered queries,
|
||||
// otherwise you may accidentally BREAK field-based queries (which often
|
||||
// rely on special characters to separate the field from the query value)
|
||||
return ClientUtils.escapeQueryChars(query);
|
||||
}
|
||||
}
|
||||
|
@@ -91,10 +91,9 @@ public class EmbargoManager
}
}
String slift = myLift.toString();
boolean ignoreAuth = context.ignoreAuthorization();
try
{
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
item.clearMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);
item.addMetadata(lift_schema, lift_element, lift_qualifier, null, slift);
log.info("Set embargo on Item "+item.getHandle()+", expires on: "+slift);
@@ -105,7 +104,7 @@ public class EmbargoManager
}
finally
{
context.setIgnoreAuthorization(ignoreAuth);
context.restoreAuthSystemState();
}
}

@@ -267,7 +266,7 @@ public class EmbargoManager
try
{
context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
Date now = new Date();

// scan items under embargo
@@ -499,7 +499,7 @@ public class EPerson extends DSpaceObject
break;

case LANGUAGE:
s = "m_text_value";
s = "m.text_value";
t = "language";
break;
case NETID:
@@ -507,23 +507,26 @@ public class EPerson extends DSpaceObject
break;

default:
s = "m_text_value";
s = "m.text_value";
t = "lastname";
}

// NOTE: The use of 's' in the order by clause can not cause an SQL
// injection because the string is derived from constant values above.
TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY ?",s);
TableRowIterator rows;
if(!t.equals("")) {
rows = DatabaseManager.query(context,
"SELECT * FROM eperson e " +
"LEFT JOIN metadatavalue m on (m.resource_id = e.eperson_id and m.resource_type_id = ? and m.metadata_field_id = ?) " +
"ORDER BY ?",
"LEFT JOIN metadatavalue m on (m.resource_id = e.eperson_id and m.resource_type_id = ? and m.metadata_field_id = ?) " +
"ORDER BY " + s,
Constants.EPERSON,
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID(),
s
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID()
);
}
else {
rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY " + s);
}

@@ -284,14 +284,23 @@ public class EventManager
{
Context ctx = new Context();

for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
.iterator(); ci.hasNext();)
{
ConsumerProfile cp = (ConsumerProfile) ci.next();
if (cp != null)
try {

for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
.iterator(); ci.hasNext();)
{
cp.getConsumer().finish(ctx);
ConsumerProfile cp = (ConsumerProfile) ci.next();
if (cp != null)
{
cp.getConsumer().finish(ctx);
}
}

ctx.complete();

} catch (Exception e) {
ctx.abort();
throw e;
}
return;

@@ -9,89 +9,167 @@ package org.dspace.handle;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.InputStreamReader;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.storage.rdbms.DatabaseManager;
|
||||
import org.dspace.storage.rdbms.TableRow;
|
||||
import org.dspace.search.DSIndexer;
|
||||
import org.dspace.browse.IndexBrowse;
|
||||
import org.dspace.discovery.IndexClient;
|
||||
|
||||
/**
|
||||
* A script to update the handle values in the database. This is typically used
|
||||
* when moving from a test machine (handle = 123456789) to a production service.
|
||||
* when moving from a test machine (handle = 123456789) to a production service
|
||||
* or when make a test clone from production service.
|
||||
*
|
||||
* @author Stuart Lewis
|
||||
* @author Ivo Prajer (Czech Technical University in Prague)
|
||||
*/
|
||||
public class UpdateHandlePrefix
|
||||
{
|
||||
|
||||
private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);
|
||||
|
||||
/**
|
||||
* When invoked as a command-line tool, updates handle prefix
|
||||
*
|
||||
* @param args the command-line arguments, none used
|
||||
* @throws java.lang.Exception
|
||||
*
|
||||
*/
|
||||
public static void main(String[] args) throws Exception
|
||||
{
|
||||
// There should be two paramters
|
||||
// There should be two parameters
|
||||
if (args.length < 2)
|
||||
{
|
||||
System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n");
|
||||
System.exit(1);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Confirm with the user that this is what they want to do
|
||||
String oldH = args[0];
|
||||
String newH = args[1];
|
||||
|
||||
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
|
||||
// Get info about changes
|
||||
System.out.println("\nGetting information about handles from database...");
|
||||
Context context = new Context();
|
||||
System.out.println("If you continue, all handles in your repository with prefix " +
|
||||
oldH + " will be updated to have handle prefix " + newH + "\n");
|
||||
String sql = "SELECT count(*) as count FROM handle " +
|
||||
String sql = "SELECT count(*) as count " +
|
||||
"FROM handle " +
|
||||
"WHERE handle LIKE '" + oldH + "%'";
|
||||
TableRow row = DatabaseManager.querySingle(context, sql, new Object[] {});
|
||||
long count = row.getLongColumn("count");
|
||||
System.out.println(count + " items will be updated.\n");
|
||||
System.out.print("Have you taken a backup, and are you ready to continue? [y/n]: ");
|
||||
String choiceString = input.readLine();
|
||||
|
||||
if (choiceString.equalsIgnoreCase("y"))
|
||||
if (count > 0)
|
||||
{
|
||||
// Make the changes
|
||||
System.out.print("Updating handle table... ");
|
||||
sql = "update handle set handle = '" + newH + "' || '/' || handle_id " +
|
||||
"where handle like '" + oldH + "/%'";
|
||||
int updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(updated + " items updated");
|
||||
// Print info text about changes
|
||||
System.out.println(
|
||||
"In your repository will be updated " + count + " handle" +
|
||||
((count > 1) ? "s" : "") + " to new prefix " + newH +
|
||||
" from original " + oldH + "!\n"
|
||||
);
|
||||
|
||||
System.out.print("Updating metadatavalues table... ");
|
||||
sql = "UPDATE metadatavalue SET text_value= (SELECT 'http://hdl.handle.net/' || " +
|
||||
"handle FROM handle WHERE handle.resource_id=item_id AND " +
|
||||
"handle.resource_type_id=2) WHERE text_value LIKE 'http://hdl.handle.net/%';";
|
||||
updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(updated + " metadata values updated");
|
||||
// Confirm with the user that this is what they want to do
|
||||
System.out.print(
|
||||
"Servlet container (e.g. Apache Tomcat, Jetty, Caucho Resin) must be running.\n" +
|
||||
"If it is necessary, please make a backup of the database.\n" +
|
||||
"Are you ready to continue? [y/n]: "
|
||||
);
|
||||
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
|
||||
String choiceString = input.readLine();
|
||||
|
||||
// Commit the changes
|
||||
context.complete();
|
||||
|
||||
System.out.print("Re-creating browse and search indexes... ");
|
||||
|
||||
// Reinitialise the browse system
|
||||
IndexBrowse.main(new String[] {"-i"});
|
||||
|
||||
// Reinitialise the browse system
|
||||
try
|
||||
if (choiceString.equalsIgnoreCase("y"))
|
||||
{
|
||||
DSIndexer.main(new String[0]);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// Not a lot we can do
|
||||
System.out.println("Error re-indexing:");
|
||||
e.printStackTrace();
|
||||
System.out.println("\nPlease manually run [dspace]/bin/index-all");
|
||||
}
|
||||
try {
|
||||
log.info("Updating handle prefix from " + oldH + " to " + newH);
|
||||
|
||||
// All done
|
||||
System.out.println("\nHandles successfully updated.");
|
||||
// Make the changes
|
||||
System.out.print("\nUpdating handle table... ");
|
||||
sql = "UPDATE handle " +
|
||||
"SET handle = '" + newH + "' || '/' || handle_id " +
|
||||
"WHERE handle like '" + oldH + "/%'";
|
||||
int updHdl = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(
|
||||
updHdl + " item" + ((updHdl > 1) ? "s" : "") + " updated"
|
||||
);
|
||||
|
||||
System.out.print("Updating metadatavalues table... ");
|
||||
sql = "UPDATE metadatavalue " +
|
||||
"SET text_value = " +
|
||||
"(" +
|
||||
"SELECT 'http://hdl.handle.net/' || handle " +
|
||||
"FROM handle " +
|
||||
"WHERE handle.resource_id = metadatavalue.resource_id " +
|
||||
"AND handle.resource_type_id = 2" +
|
||||
") " +
|
||||
"WHERE text_value LIKE 'http://hdl.handle.net/" + oldH + "/%'" +
|
||||
"AND EXISTS " +
|
||||
"(" +
|
||||
"SELECT 1 " +
|
||||
"FROM handle " +
|
||||
"WHERE handle.resource_id = metadatavalue.resource_id " +
|
||||
"AND handle.resource_type_id = 2" +
|
||||
")";
|
||||
int updMeta = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(
|
||||
updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
|
||||
);
|
||||
|
||||
// Commit the changes
|
||||
context.complete();
|
||||
|
||||
log.info(
|
||||
"Done with updating handle prefix. " +
|
||||
"It was changed " + updHdl + " handle" + ((updHdl > 1) ? "s" : "") +
|
||||
" and " + updMeta + " metadata record" + ((updMeta > 1) ? "s" : "")
|
||||
);
|
||||
|
||||
}
|
||||
catch (SQLException sqle)
|
||||
{
|
||||
if ((context != null) && (context.isValid()))
|
||||
{
|
||||
context.abort();
|
||||
context = null;
|
||||
}
|
||||
System.out.println("\nError during SQL operations.");
|
||||
throw sqle;
|
||||
}
|
||||
|
||||
System.out.println("Handles successfully updated in database.\n");
|
||||
System.out.println("Re-creating browse and search indexes...");
|
||||
|
||||
try
|
||||
{
|
||||
// Reinitialise the search and browse system
|
||||
IndexClient.main(new String[] {"-b"});
|
||||
System.out.println("Browse and search indexes are ready now.");
|
||||
// All done
|
||||
System.out.println("\nAll done successfully. Please check the DSpace logs!\n");
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// Not a lot we can do
|
||||
System.out.println("Error during re-indexing.");
|
||||
System.out.println(
|
||||
"\n\nAutomatic re-indexing failed. Please perform it manually.\n" +
|
||||
"You should run one of the following commands:\n\n" +
|
||||
" [dspace]/bin/dspace index-discovery -b\n\n" +
|
||||
"If you are using Solr for browse (this is the default setting).\n" +
|
||||
"When launching this command, your servlet container must be running.\n\n" +
|
||||
" [dspace]/bin/dspace index-lucene-init\n\n" +
|
||||
"If you enabled Lucene for search.\n" +
|
||||
"When launching this command, your servlet container must be shutdown.\n"
|
||||
);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println("No changes have been made to your data.\n");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println("No changes have been made to your data.");
|
||||
System.out.println("Nothing to do! All handles are up-to-date.\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -10,7 +10,6 @@ package org.dspace.identifier;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
@@ -68,12 +67,12 @@ public class DOIIdentifierProvider
public static final String DOI_QUALIFIER = "uri";

public static final Integer TO_BE_REGISTERED = 1;
public static final Integer TO_BE_RESERVERED = 2;
public static final Integer TO_BE_RESERVED = 2;
public static final Integer IS_REGISTERED = 3;
public static final Integer IS_RESERVED = 4;
public static final Integer UPDATE_RESERVERED = 5;
public static final Integer UPDATE_RESERVED = 5;
public static final Integer UPDATE_REGISTERED = 6;
public static final Integer UPDATE_BEFORE_REGISTERATION = 7;
public static final Integer UPDATE_BEFORE_REGISTRATION = 7;
public static final Integer TO_BE_DELETED = 8;
public static final Integer DELETED = 9;

@@ -251,7 +250,7 @@ public class DOIIdentifierProvider
return;
}

doiRow.setColumn("status", TO_BE_RESERVERED);
doiRow.setColumn("status", TO_BE_RESERVED);
try
{
DatabaseManager.update(context, doiRow);
@@ -353,11 +352,11 @@ public class DOIIdentifierProvider
}
else if (TO_BE_REGISTERED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", UPDATE_BEFORE_REGISTERATION);
doiRow.setColumn("status", UPDATE_BEFORE_REGISTRATION);
}
else if (IS_RESERVED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", UPDATE_RESERVERED);
doiRow.setColumn("status", UPDATE_RESERVED);
}
else
{
@@ -416,11 +415,11 @@ public class DOIIdentifierProvider
{
doiRow.setColumn("status", IS_REGISTERED);
}
else if (UPDATE_BEFORE_REGISTERATION == doiRow.getIntColumn("status"))
else if (UPDATE_BEFORE_REGISTRATION == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", TO_BE_REGISTERED);
}
else if (UPDATE_RESERVERED == doiRow.getIntColumn("status"))
else if (UPDATE_RESERVED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", IS_RESERVED);
}
@@ -561,7 +561,7 @@ public class EZIDIdentifierProvider
/**
* Map selected DSpace metadata to fields recognized by DataCite.
*/
private Map<String, String> crosswalkMetadata(DSpaceObject dso)
Map<String, String> crosswalkMetadata(DSpaceObject dso)
{
if ((null == dso) || !(dso instanceof Item))
{
@@ -632,18 +632,42 @@ public class EZIDIdentifierProvider
mapped.put(DATACITE_PUBLICATION_YEAR, year);
}

// TODO find a way to get a current direct URL to the object and set _target
// mapped.put("_target", url);
// Supply _target link back to this object
String handle = dso.getHandle();
if (null == handle)
{
log.warn("{} #{} has no handle -- location not set.",
dso.getTypeText(), dso.getID());
}
else
{
String url = configurationService.getProperty("dspace.url")
+ "/handle/" + item.getHandle();
log.info("Supplying location: {}", url);
mapped.put("_target", url);
}

return mapped;
}

/**
* Provide a map from DSO metadata keys to EZID keys. This will drive the
* generation of EZID metadata for the minting of new identifiers.
*
* @param aCrosswalk
*/
@Required
public void setCrosswalk(Map<String, String> aCrosswalk)
{
crosswalk = aCrosswalk;
}

/**
* Provide a map from DSO metadata keys to classes which can transform their
* values to something acceptable to EZID.
*
* @param transformMap
*/
public void setCrosswalkTransform(Map<String, Transform> transformMap)
{
transforms = transformMap;
@@ -179,19 +179,19 @@ public class DOIOrganiser {

if (line.hasOption('l'))
{
organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVERED);
organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVED);
organiser.list("registration", null, null, DOIIdentifierProvider.TO_BE_REGISTERED);
organiser.list("update", null, null,
DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
DOIIdentifierProvider.UPDATE_REGISTERED,
DOIIdentifierProvider.UPDATE_RESERVERED);
DOIIdentifierProvider.UPDATE_RESERVED);
organiser.list("deletion", null, null, DOIIdentifierProvider.TO_BE_DELETED);
}

if (line.hasOption('s'))
{
TableRowIterator it = organiser
.getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVERED);
.getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVED);

try {
if (!it.hasNext())
@@ -244,8 +244,8 @@ public class DOIOrganiser {
if (line.hasOption('u'))
{
TableRowIterator it = organiser.getDOIsByStatus(
DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
DOIIdentifierProvider.UPDATE_RESERVERED,
DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
DOIIdentifierProvider.UPDATE_RESERVED,
DOIIdentifierProvider.UPDATE_REGISTERED);

try {
@@ -259,7 +259,7 @@ public class CCLookup {
throws IOException{

// Determine the issue URL
String issueUrl = this.cc_root + "/license/" + licenseId + "/issue";
String issueUrl = cc_root + "/license/" + licenseId + "/issue";
// Assemble the "answers" document
String answer_doc = "<answers>\n<locale>" + lang + "</locale>\n" + "<license-" + licenseId + ">\n";
Iterator keys = answers.keySet().iterator();
@@ -411,31 +411,18 @@ public class CCLookup {

public String getRdf()
throws IOException {
String myString = null;
java.io.ByteArrayOutputStream outputstream = new java.io.ByteArrayOutputStream();
String result = "";
try {
outputstream.write("<result>\n".getBytes());
JDOMXPath xpathRdf = new JDOMXPath("//result/rdf");
JDOMXPath xpathLicenseRdf = new JDOMXPath("//result/licenserdf");
XMLOutputter xmloutputter = new XMLOutputter();
Element rdfParent = ((Element)xpathRdf.selectSingleNode(this.license_doc));
xmloutputter.output(rdfParent, outputstream);
Element licenseRdfParent = ((Element)xpathLicenseRdf.selectSingleNode(this.license_doc));
outputstream.write("\n".getBytes());
xmloutputter.output(licenseRdfParent, outputstream);
outputstream.write("\n</result>\n".getBytes());
result = CreativeCommons.fetchLicenseRDF(license_doc);
} catch (Exception e) {
log.warn("An error occurred getting the rdf . . ." + e.getMessage() );
setSuccess(false);
} finally {
outputstream.close();
return outputstream.toString();
}
}
return result;
}

public boolean isSuccess() {
setSuccess(false);
java.io.ByteArrayOutputStream outputstream = new java.io.ByteArrayOutputStream();
JDOMXPath xp_Success = null;
String text = null;
try {
@@ -7,13 +7,16 @@
|
||||
*/
|
||||
package org.dspace.license;
|
||||
|
||||
import java.io.*;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.StringWriter;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import javax.xml.transform.Templates;
|
||||
import javax.xml.transform.Transformer;
|
||||
import javax.xml.transform.TransformerConfigurationException;
|
||||
import javax.xml.transform.TransformerException;
|
||||
import javax.xml.transform.TransformerFactory;
|
||||
@@ -26,11 +29,14 @@ import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Metadatum;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.Metadatum;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.Utils;
|
||||
import org.jdom.Document;
|
||||
import org.jdom.transform.JDOMResult;
|
||||
import org.jdom.transform.JDOMSource;
|
||||
|
||||
public class CreativeCommons
|
||||
{
|
||||
@@ -46,9 +52,17 @@ public class CreativeCommons
|
||||
|
||||
/**
|
||||
* Some BitStream Names (BSN)
|
||||
*
|
||||
* @deprecated use the metadata retrieved at {@link CreativeCommons#getCCField(String)} (see https://jira.duraspace.org/browse/DS-2604)
|
||||
*/
|
||||
@Deprecated
|
||||
private static final String BSN_LICENSE_URL = "license_url";
|
||||
|
||||
/**
|
||||
*
|
||||
* @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
|
||||
*/
|
||||
@Deprecated
|
||||
private static final String BSN_LICENSE_TEXT = "license_text";
|
||||
|
||||
private static final String BSN_LICENSE_RDF = "license_rdf";
|
||||
@@ -121,41 +135,6 @@ public class CreativeCommons
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_RDF, bs_rdf_format, licenseRdf.getBytes());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This is a bit of the "do-the-right-thing" method for CC stuff in an item
|
||||
*/
|
||||
public static void setLicense(Context context, Item item,
|
||||
String cc_license_url) throws SQLException, IOException,
|
||||
AuthorizeException
|
||||
{
|
||||
Bundle bundle = getCcBundle(item);
|
||||
|
||||
// get some more information
|
||||
String license_text = fetchLicenseText(cc_license_url);
|
||||
String license_rdf = fetchLicenseRDF(cc_license_url);
|
||||
|
||||
// set the formats
|
||||
BitstreamFormat bs_url_format = BitstreamFormat.findByShortDescription(
|
||||
context, "License");
|
||||
BitstreamFormat bs_text_format = BitstreamFormat.findByShortDescription(
|
||||
context, "CC License");
|
||||
BitstreamFormat bs_rdf_format = BitstreamFormat.findByShortDescription(
|
||||
context, "RDF XML");
|
||||
|
||||
// set the URL bitstream
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_URL, bs_url_format,
|
||||
cc_license_url.getBytes());
|
||||
|
||||
// set the license text bitstream
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_TEXT, bs_text_format,
|
||||
license_text.getBytes());
|
||||
|
||||
// set the RDF bitstream
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_RDF, bs_rdf_format,
|
||||
license_rdf.getBytes());
|
||||
}
|
||||
|
||||
/**
|
||||
* Used by DSpaceMetsIngester
|
||||
*
|
||||
@@ -224,8 +203,7 @@ public class CreativeCommons
|
||||
// verify it has correct contents
|
||||
try
|
||||
{
|
||||
if ((getLicenseURL(item) == null) || (getLicenseText(item) == null)
|
||||
|| (getLicenseRDF(item) == null))
|
||||
if ((getLicenseURL(item) == null))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@@ -238,18 +216,6 @@ public class CreativeCommons
|
||||
return true;
|
||||
}
|
||||
|
||||
public static String getLicenseURL(Item item) throws SQLException,
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
return getStringFromBitstream(item, BSN_LICENSE_URL);
|
||||
}
|
||||
|
||||
public static String getLicenseText(Item item) throws SQLException,
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
return getStringFromBitstream(item, BSN_LICENSE_TEXT);
|
||||
}
|
||||
|
||||
public static String getLicenseRDF(Item item) throws SQLException,
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
@@ -269,56 +235,55 @@ public class CreativeCommons
    /**
     * Get Creative Commons license Text, returning Bitstream object.
     * @return bitstream or null.
     *
     * @deprecated to make the JSPUI and XMLUI approaches uniform, the bitstream holding the license in textual format is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
     */
    @Deprecated
    public static Bitstream getLicenseTextBitstream(Item item) throws SQLException,
            IOException, AuthorizeException
    {
        return getBitstream(item, BSN_LICENSE_TEXT);
    }

    /**
     * Retrieve the license text
     *
     * @param item - the item
     * @return the license in textual format
     * @throws SQLException
     * @throws IOException
     * @throws AuthorizeException
     *
     * @deprecated to make the JSPUI and XMLUI approaches uniform, the bitstream holding the license in textual format is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
     */
    public static String getLicenseText(Item item) throws SQLException, IOException, AuthorizeException {
        return getStringFromBitstream(item, BSN_LICENSE_TEXT);
    }

    public static String getLicenseURL(Item item) throws SQLException, IOException, AuthorizeException {
        String licenseUri = CreativeCommons.getCCField("uri").ccItemValue(item);
        if (StringUtils.isNotBlank(licenseUri)) {
            return licenseUri;
        }
        // JSPUI backward compatibility, see https://jira.duraspace.org/browse/DS-2604
        return getStringFromBitstream(item, BSN_LICENSE_URL);
    }

    public static String fetchLicenseRdf(String ccResult) {
        StringWriter result = new StringWriter();
        String licenseRdfString = new String("");
        try {
            InputStream inputstream = new ByteArrayInputStream(ccResult.getBytes("UTF-8"));
            templates.newTransformer().transform(new StreamSource(inputstream), new StreamResult(result));
        } catch (TransformerException te) {
            throw new RuntimeException("Transformer exception " + te.getMessage(), te);
        } catch (IOException ioe) {
            throw new RuntimeException("IOException " + ioe.getCause().toString(), ioe);
        } finally {
            return result.getBuffer().toString();
        }
    }
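A minimal usage sketch (not part of this changeset) of the metadata-first lookup that getLicenseURL now performs after DS-2604: the CC license URI is read from item metadata via getCCField("uri"), with the deprecated license_url bitstream kept only as a legacy fallback. The surrounding class and method names are illustrative assumptions.

    import org.dspace.content.Item;
    import org.dspace.license.CreativeCommons;

    public class CCLicenseReportSketch
    {
        public static String licenseUriFor(Item item) throws Exception
        {
            // Preferred source after DS-2604: the CC license URI metadata field
            String uri = CreativeCommons.getCCField("uri").ccItemValue(item);
            if (uri != null && !uri.isEmpty())
            {
                return uri;
            }
            // Legacy fallback: items licensed before the upgrade may still carry
            // the deprecated license_url bitstream, which getLicenseURL() also checks.
            return CreativeCommons.getLicenseURL(item);
        }
    }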


    /**
     *
     * The next two methods are old CC.
     * They remain until previous usages are eliminated.
     * @Deprecated
     *
     */
    /**
     * Get a few license-specific properties. We expect these to be cached at
     * least per server run.
     * Applies the same transformation to the document in order to retrieve only the most relevant part of the document passed as a parameter.
     * If no transformation is needed, consider leaving CreativeCommons.xml empty.
     *
     * @param license - a document that may contain the license RDF as part of its content
     * @return the document license in textual format after the transformation
     */
    public static String fetchLicenseText(String license_url)
    {
        String text_url = license_url;
        byte[] urlBytes = fetchURL(text_url);

        return (urlBytes != null) ? new String(urlBytes) : "";
    }

    public static String fetchLicenseRDF(String license_url)
    public static String fetchLicenseRDF(Document license)
    {
        StringWriter result = new StringWriter();

        try
        {
            templates.newTransformer().transform(
                    new StreamSource(license_url + "rdf"),
                    new JDOMSource(license),
                    new StreamResult(result)
                    );
        }
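An illustrative sketch of the JDOMSource -> StreamResult plumbing that the revised fetchLicenseRDF(Document) relies on: the in-memory JDOM tree is fed straight into JAXP instead of fetching "license_url + rdf" over the network. An identity transformer stands in for the class's stylesheet here, which is an assumption made only for the example.

    import java.io.StringWriter;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.stream.StreamResult;
    import org.jdom.Document;
    import org.jdom.Element;
    import org.jdom.transform.JDOMSource;

    public class JdomTransformSketch
    {
        public static String serialize(Document license) throws Exception
        {
            StringWriter out = new StringWriter();
            // No-arg newTransformer() yields an identity transform; CreativeCommons
            // instead applies its configured XSLT templates at this point.
            Transformer identity = TransformerFactory.newInstance().newTransformer();
            identity.transform(new JDOMSource(license), new StreamResult(out));
            return out.toString();
        }

        public static void main(String[] args) throws Exception
        {
            System.out.println(serialize(new Document(new Element("result"))));
        }
    }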
@@ -421,33 +386,6 @@ public class CreativeCommons
        return baos.toByteArray();
    }

    /**
     * Fetch the contents of a URL
     */
    private static byte[] fetchURL(String url_string)
    {
        try
        {
            String line = "";
            URL url = new URL(url_string);
            URLConnection connection = url.openConnection();
            InputStream inputStream = connection.getInputStream();
            BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
            StringBuilder sb = new StringBuilder();

            while ((line = reader.readLine()) != null)
            {
                sb.append(line);
            }

            return sb.toString().getBytes();
        }
        catch (Exception exc)
        {
            log.error(exc.getMessage());
            return null;
        }
    }
    /**
     * Returns a metadata field handle for given field Id
     */
@@ -564,4 +502,34 @@ public class CreativeCommons
            item.addMetadata(params[0], params[1], params[2], params[3], value);
        }
    }

    /**
     * Remove license information, also deleting the license bitstream.
     *
     * @param context - DSpace Context
     * @param uriField - the metadata field for license uri
     * @param nameField - the metadata field for license name
     * @param item - the item
     * @throws AuthorizeException
     * @throws IOException
     * @throws SQLException
     */
    public static void removeLicense(Context context, MdField uriField,
            MdField nameField, Item item) throws AuthorizeException, IOException, SQLException {
        // only remove any previous licenses
        String licenseUri = uriField.ccItemValue(item);
        if (licenseUri != null) {
            uriField.removeItemValue(item, licenseUri);
            if (ConfigurationManager.getBooleanProperty("cc.submit.setname"))
            {
                String licenseName = nameField.keyedItemValue(item, licenseUri);
                nameField.removeItemValue(item, licenseName);
            }
            if (ConfigurationManager.getBooleanProperty("cc.submit.addbitstream"))
            {
                removeLicense(context, item);
            }
        }
    }

}
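A minimal caller sketch (assumed, not part of the diff) for the new removeLicense(Context, MdField, MdField, Item) helper, which centralises the cleanup logic previously duplicated in the submission steps.

    import org.dspace.content.Item;
    import org.dspace.core.Context;
    import org.dspace.license.CreativeCommons;

    public class ClearCcLicenseSketch
    {
        public static void clear(Context context, Item item) throws Exception
        {
            CreativeCommons.MdField uriField = CreativeCommons.getCCField("uri");
            CreativeCommons.MdField nameField = CreativeCommons.getCCField("name");
            // Removes the uri/name metadata and, when cc.submit.addbitstream is set,
            // the stored CC license bitstreams as well.
            CreativeCommons.removeLicense(context, uriField, nameField, item);
            item.update();
            context.commit();
        }
    }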
|
@@ -72,7 +72,7 @@ public class LicenseCleanup
    {

        Context ctx = new Context();
        ctx.setIgnoreAuthorization(true);
        ctx.turnOffAuthorisationSystem();
        ItemIterator iter = Item.findAll(ctx);

        Properties props = new Properties();
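Reviewer note with an illustrative sketch: this hunk (and the DSIndexer hunk below) replaces the deprecated setIgnoreAuthorization(true) call with turnOffAuthorisationSystem(). The try/finally shape and the restoreAuthSystemState() counterpart shown here are assumptions about typical usage, not code from this diff.

    import org.dspace.core.Context;

    public class AuthBypassSketch
    {
        public static void runPrivileged(Context ctx) throws Exception
        {
            ctx.turnOffAuthorisationSystem();
            try
            {
                // ... perform maintenance work that must bypass authorization ...
            }
            finally
            {
                // Restore normal authorization checks when done.
                ctx.restoreAuthSystemState();
            }
        }
    }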
|
@@ -20,6 +20,7 @@ import org.dspace.content.Bundle;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.Site;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.event.Consumer;
|
||||
@@ -52,6 +53,8 @@ public class RDFConsumer implements Consumer
|
||||
}
|
||||
|
||||
int sType = event.getSubjectType();
|
||||
log.debug(event.getEventTypeAsString() + " for "
|
||||
+ event.getSubjectTypeAsString() + ":" + event.getSubjectID());
|
||||
switch (sType)
|
||||
{
|
||||
case (Constants.BITSTREAM) :
|
||||
@@ -100,7 +103,7 @@ public class RDFConsumer implements Consumer
|
||||
Bitstream bitstream = Bitstream.find(ctx, event.getSubjectID());
|
||||
if (bitstream == null)
|
||||
{
|
||||
log.warn("Cannot find bitstream " + event.getSubjectID() + "! "
|
||||
log.debug("Cannot find bitstream " + event.getSubjectID() + "! "
|
||||
+ "Ignoring, as it is likely it was deleted "
|
||||
+ "and we'll cover it by a REMOVE event on its bundle.");
|
||||
return;
|
||||
@@ -111,6 +114,11 @@ public class RDFConsumer implements Consumer
|
||||
Item[] items = b.getItems();
|
||||
for (Item i : items)
|
||||
{
|
||||
if (WorkspaceItem.findByItem(ctx, i) != null)
|
||||
{
|
||||
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
|
||||
continue;
|
||||
}
|
||||
DSOIdentifier id = new DSOIdentifier(i, ctx);
|
||||
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
|
||||
{
|
||||
@@ -148,7 +156,7 @@ public class RDFConsumer implements Consumer
|
||||
Bundle bundle = Bundle.find(ctx, event.getSubjectID());
|
||||
if (bundle == null)
|
||||
{
|
||||
log.warn("Cannot find bundle " + event.getSubjectID() + "! "
|
||||
log.debug("Cannot find bundle " + event.getSubjectID() + "! "
|
||||
+ "Ignoring, as it is likely it was deleted "
|
||||
+ "and we'll cover it by a REMOVE event on its item.");
|
||||
return;
|
||||
@@ -156,6 +164,11 @@ public class RDFConsumer implements Consumer
|
||||
Item[] items = bundle.getItems();
|
||||
for (Item i : items)
|
||||
{
|
||||
if (WorkspaceItem.findByItem(ctx, i) != null)
|
||||
{
|
||||
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
|
||||
continue;
|
||||
}
|
||||
DSOIdentifier id = new DSOIdentifier(i, ctx);
|
||||
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
|
||||
{
|
||||
@@ -216,14 +229,24 @@ public class RDFConsumer implements Consumer
|
||||
DSpaceObject dso = event.getSubject(ctx);
|
||||
if (dso == null)
|
||||
{
|
||||
log.warn("Cannot find " + event.getSubjectTypeAsString() + " "
|
||||
log.debug("Cannot find " + event.getSubjectTypeAsString() + " "
|
||||
+ event.getSubjectID() + "! " + "Ignoring, as it is "
|
||||
+ "likely it was deleted and we'll cover it by another "
|
||||
+ "event with the type REMOVE.");
|
||||
return;
|
||||
}
|
||||
DSOIdentifier id = new DSOIdentifier(dso, ctx);
|
||||
|
||||
// ignore unfinished submissions here. Every unfinished submission
|
||||
// has an workspace item. The item flag "in_archive" doesn't help us
|
||||
// here as this is also set to false if a newer version was submitted.
|
||||
if (dso instanceof Item
|
||||
&& WorkspaceItem.findByItem(ctx, (Item) dso) != null)
|
||||
{
|
||||
log.debug("Ignoring Item " + dso.getID() + " as a corresponding workspace item exists.");
|
||||
return;
|
||||
}
|
||||
|
||||
DSOIdentifier id = new DSOIdentifier(dso, ctx);
|
||||
// If an item gets withdrawn, a MODIFY event is fired. We have to
|
||||
// delete the item from the triple store instead of converting it.
|
||||
// we don't have to take care of reinstatements of items as they can
|
||||
|
@@ -392,7 +392,7 @@ public class DSIndexer
|
||||
{
|
||||
setBatchProcessingMode(true);
|
||||
Context context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
String usage = "org.dspace.search.DSIndexer [-cbhof[r <item handle>]] or nothing to update/clean an existing index.";
|
||||
Options options = new Options();
|
||||
|
@@ -331,6 +331,7 @@ public class SolrLogger
|
||||
{
|
||||
doc1.addField("userAgent", request.getHeader("User-Agent"));
|
||||
}
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
// Save the location information if valid, save the event without
|
||||
// location information if not valid
|
||||
if(locationService != null)
|
||||
@@ -354,7 +355,7 @@ public class SolrLogger
|
||||
doc1.addField("city", location.city);
|
||||
doc1.addField("latitude", location.latitude);
|
||||
doc1.addField("longitude", location.longitude);
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
|
||||
|
||||
|
||||
}
|
||||
@@ -416,6 +417,7 @@ public class SolrLogger
|
||||
{
|
||||
doc1.addField("userAgent", userAgent);
|
||||
}
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
// Save the location information if valid, save the event without
|
||||
// location information if not valid
|
||||
if(locationService != null)
|
||||
@@ -439,7 +441,7 @@ public class SolrLogger
|
||||
doc1.addField("city", location.city);
|
||||
doc1.addField("latitude", location.latitude);
|
||||
doc1.addField("longitude", location.longitude);
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
|
||||
|
||||
|
||||
}
|
||||
@@ -1338,6 +1340,7 @@ public class SolrLogger
|
||||
//Upload the data in the csv files to our new solr core
|
||||
ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
|
||||
contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
|
||||
contentStreamUpdateRequest.setParam("skip", "_version_");
|
||||
contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
|
||||
contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");
|
||||
|
||||
|
@@ -15,6 +15,7 @@ import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.Collections;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
@@ -42,10 +43,10 @@ public class SpiderDetector {
|
||||
private static IPTable table = null;
|
||||
|
||||
/** Collection of regular expressions to match known spiders' agents. */
|
||||
private static List<Pattern> agents = new ArrayList<Pattern>();
|
||||
private static List<Pattern> agents = Collections.synchronizedList(new ArrayList<Pattern>());
|
||||
|
||||
/** Collection of regular expressions to match known spiders' domain names. */
|
||||
private static List<Pattern> domains = new ArrayList<Pattern>();
|
||||
private static List<Pattern> domains = Collections.synchronizedList(new ArrayList<Pattern>());
|
||||
|
||||
/**
|
||||
* Utility method which reads lines from a file & returns them in a Set.
|
||||
@@ -199,13 +200,15 @@ public class SpiderDetector {
|
||||
{
|
||||
// See if any agent patterns match
|
||||
if (null != agent)
|
||||
{
|
||||
if (agents.isEmpty())
|
||||
loadPatterns("agents", agents);
|
||||
|
||||
{
|
||||
synchronized(agents)
|
||||
{
|
||||
if (agents.isEmpty())
|
||||
loadPatterns("agents", agents);
|
||||
}
|
||||
for (Pattern candidate : agents)
|
||||
{
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
if (null != candidate && candidate.matcher(agent).find())
|
||||
{
|
||||
return true;
|
||||
@@ -230,15 +233,15 @@ public class SpiderDetector {
|
||||
// No. See if any DNS names match
|
||||
if (null != hostname)
|
||||
{
|
||||
if (domains.isEmpty())
|
||||
synchronized(domains)
|
||||
{
|
||||
loadPatterns("domains", domains);
|
||||
if (domains.isEmpty())
|
||||
loadPatterns("domains", domains);
|
||||
}
|
||||
|
||||
for (Pattern candidate : domains)
|
||||
{
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
if (null != candidate && candidate.matcher(hostname).find())
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
if (null != candidate && candidate.matcher(hostname).find())
|
||||
{
|
||||
return true;
|
||||
}
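Sketch of the concurrency pattern this SpiderDetector hunk introduces (illustrative only): a synchronizedList plus a synchronized block around the lazy load, so two request threads cannot both populate the pattern list. The loadPatterns stand-in below is an assumption; in SpiderDetector it reads the configured agent/domain files.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.regex.Pattern;

    public class LazyPatternsSketch
    {
        private static final List<Pattern> agents =
                Collections.synchronizedList(new ArrayList<Pattern>());

        private static void loadPatterns(List<Pattern> into)
        {
            // stand-in for SpiderDetector.loadPatterns("agents", agents)
            into.add(Pattern.compile("googlebot", Pattern.CASE_INSENSITIVE));
        }

        public static boolean isSpiderAgent(String userAgent)
        {
            // Only the lazy initialisation is guarded, mirroring the patch.
            synchronized (agents)
            {
                if (agents.isEmpty())
                {
                    loadPatterns(agents);
                }
            }
            for (Pattern candidate : agents)
            {
                if (candidate != null && candidate.matcher(userAgent).find())
                {
                    return true;
                }
            }
            return false;
        }
    }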
|
||||
|
@@ -15,7 +15,6 @@ import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.ResultSetMetaData;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.SQLWarning;
|
||||
import java.sql.Statement;
|
||||
import java.sql.Time;
|
||||
import java.sql.Timestamp;
|
||||
@@ -35,8 +34,6 @@ import javax.sql.DataSource;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.flywaydb.core.Flyway;
|
||||
import org.flywaydb.core.api.MigrationInfo;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -881,22 +878,22 @@ public class DatabaseManager
    }

    /**
     * Return the canonical name for a table.
     * Return the canonical name for a database object.
     *
     * @param table
     *            The name of the table.
     * @return The canonical name of the table.
     * @param db_object
     *            The name of the database object.
     * @return The canonical name of the database object.
     */
    static String canonicalize(String table)
    static String canonicalize(String db_object)
    {
        // Oracle expects upper-case table names
        // Oracle expects upper-case table names, schemas, etc.
        if (isOracle)
        {
            return (table == null) ? null : table.toUpperCase();
            return (db_object == null) ? null : db_object.toUpperCase();
        }

        // default database postgres wants lower-case table names
        return (table == null) ? null : table.toLowerCase();
        return (db_object == null) ? null : db_object.toLowerCase();
    }
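A small stand-alone illustration (assumed names, not from the diff) of the case-folding contract that canonicalize() now applies to any database object name, which is why the rename away from "table" matters for schemas and constraint names.

    public class CanonicalizeSketch
    {
        static boolean isOracle; // mirrors DatabaseManager's flag

        static String canonicalize(String dbObject)
        {
            if (isOracle)
            {
                // Oracle expects upper-case object names
                return (dbObject == null) ? null : dbObject.toUpperCase();
            }
            // PostgreSQL (the default) wants lower-case names
            return (dbObject == null) ? null : dbObject.toLowerCase();
        }

        public static void main(String[] args)
        {
            isOracle = true;
            System.out.println(canonicalize("metadatavalue_item_id_fkey")); // METADATAVALUE_ITEM_ID_FKEY
            isOracle = false;
            System.out.println(canonicalize("Community2Collection"));       // community2collection
        }
    }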
|
||||
////////////////////////////////////////
|
||||
@@ -1237,10 +1234,6 @@ public class DatabaseManager
|
||||
|
||||
try
|
||||
{
|
||||
String schema = ConfigurationManager.getProperty("db.schema");
|
||||
if(StringUtils.isBlank(schema)){
|
||||
schema = null;
|
||||
}
|
||||
String catalog = null;
|
||||
|
||||
int dotIndex = table.indexOf('.');
|
||||
@@ -1254,6 +1247,9 @@ public class DatabaseManager
|
||||
|
||||
connection = getConnection();
|
||||
|
||||
// Get current database schema name
|
||||
String schema = DatabaseUtils.getSchemaName(connection);
|
||||
|
||||
DatabaseMetaData metadata = connection.getMetaData();
|
||||
Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>();
|
||||
|
||||
|
@@ -873,8 +873,10 @@ public class DatabaseUtils
|
||||
* Get the Database Schema Name in use by this Connection, so that it can
|
||||
* be used to limit queries in other methods (e.g. tableExists()).
|
||||
* <P>
|
||||
* For PostgreSQL, schema is simply what is configured in db.schema or "public"
|
||||
* For Oracle, schema is actually the database *USER* or owner.
|
||||
* NOTE: Once we upgrade to using Apache Commons DBCP / Pool version 2.0,
|
||||
* this method WILL BE REMOVED in favor of java.sql.Connection's new
|
||||
* "getSchema()" method.
|
||||
* http://docs.oracle.com/javase/7/docs/api/java/sql/Connection.html#getSchema()
|
||||
*
|
||||
* @param connection
|
||||
* Current Database Connection
|
||||
@@ -886,27 +888,29 @@ public class DatabaseUtils
|
||||
String schema = null;
|
||||
DatabaseMetaData meta = connection.getMetaData();
|
||||
|
||||
// Determine our DB type
|
||||
String dbType = DatabaseManager.findDbKeyword(meta);
|
||||
// Check the configured "db.schema" FIRST for the value configured there
|
||||
schema = DatabaseManager.canonicalize(ConfigurationManager.getProperty("db.schema"));
|
||||
|
||||
if(dbType.equals(DatabaseManager.DBMS_POSTGRES))
|
||||
// If unspecified, determine "sane" defaults based on DB type
|
||||
if(StringUtils.isBlank(schema))
|
||||
{
|
||||
// Get the schema name from "db.schema"
|
||||
schema = ConfigurationManager.getProperty("db.schema");
|
||||
|
||||
// If unspecified, default schema is "public"
|
||||
if(StringUtils.isBlank(schema)){
|
||||
String dbType = DatabaseManager.findDbKeyword(meta);
|
||||
|
||||
if(dbType.equals(DatabaseManager.DBMS_POSTGRES))
|
||||
{
|
||||
// For PostgreSQL, the default schema is named "public"
|
||||
// See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html
|
||||
schema = "public";
|
||||
}
|
||||
else if (dbType.equals(DatabaseManager.DBMS_ORACLE))
|
||||
{
|
||||
// For Oracle, default schema is actually the user account
|
||||
// See: http://stackoverflow.com/a/13341390
|
||||
schema = meta.getUserName();
|
||||
}
|
||||
else
|
||||
schema = null;
|
||||
}
|
||||
else if (dbType.equals(DatabaseManager.DBMS_ORACLE))
|
||||
{
|
||||
// Schema is actually the user account
|
||||
// See: http://stackoverflow.com/a/13341390
|
||||
schema = meta.getUserName();
|
||||
}
|
||||
else
|
||||
schema = null;
|
||||
|
||||
return schema;
|
||||
}
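The resolution order implemented by the revised getSchemaName() can be summarised as a small decision helper. This sketch uses assumed names and plain string keys for the DBMS types; the real method reads db.schema via ConfigurationManager, canonicalizes it, and obtains the Oracle user name from DatabaseMetaData.

    import org.apache.commons.lang.StringUtils;

    public class SchemaNameSketch
    {
        public static String resolveSchema(String configuredSchema, String dbType, String oracleUser)
        {
            // 1) an explicitly configured db.schema always wins
            if (StringUtils.isNotBlank(configuredSchema))
            {
                return configuredSchema;
            }
            // 2) otherwise fall back to a per-DBMS default
            if ("postgres".equals(dbType))
            {
                return "public"; // PostgreSQL's default schema
            }
            if ("oracle".equals(dbType))
            {
                return oracleUser; // Oracle: schema == owning user account
            }
            return null;
        }
    }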
|
||||
|
@@ -23,18 +23,17 @@ import org.apache.commons.lang.StringUtils;
|
||||
public class MigrationUtils
|
||||
{
|
||||
/**
|
||||
* Drop a given Database Constraint (based on the current database type).
|
||||
* Drop a given Database Column Constraint (based on the current database type).
|
||||
* Returns a "checksum" for this migration which can be used as part of
|
||||
* a Flyway Java migration
|
||||
*
|
||||
* @param connection the current Database connection
|
||||
* @param tableName the name of the table the constraint applies to
|
||||
* @param columnName the name of the column the constraint applies to
|
||||
* @param constraintSuffix Only used for PostgreSQL, whose constraint naming convention depends on a suffix (key, fkey, etc)
|
||||
* @return migration checksum as an Integer
|
||||
* @throws SQLException if a database error occurs
|
||||
*/
|
||||
public static Integer dropDBConstraint(Connection connection, String tableName, String columnName, String constraintSuffix)
|
||||
public static Integer dropDBConstraint(Connection connection, String tableName, String columnName)
|
||||
throws SQLException
|
||||
{
|
||||
Integer checksum = -1;
|
||||
@@ -48,13 +47,17 @@ public class MigrationUtils
|
||||
String dbtype = DatabaseManager.findDbKeyword(meta);
|
||||
String constraintName = null;
|
||||
String constraintNameSQL = null;
|
||||
String schemaName = null;
|
||||
switch(dbtype)
|
||||
{
|
||||
case DatabaseManager.DBMS_POSTGRES:
|
||||
// In Postgres, constraints are always named:
|
||||
// {tablename}_{columnname(s)}_{suffix}
|
||||
// see: http://stackoverflow.com/a/4108266/3750035
|
||||
constraintName = StringUtils.lowerCase(tableName) + "_" + StringUtils.lowerCase(columnName) + "_" + StringUtils.lowerCase(constraintSuffix);
|
||||
// In Postgres, column constraints are listed in the "information_schema.key_column_usage" view
|
||||
// See: http://www.postgresql.org/docs/9.4/static/infoschema-key-column-usage.html
|
||||
constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " +
|
||||
"FROM information_schema.key_column_usage " +
|
||||
"WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ?";
|
||||
// For Postgres, we need to limit by the schema as well
|
||||
schemaName = DatabaseUtils.getSchemaName(connection);
|
||||
break;
|
||||
case DatabaseManager.DBMS_ORACLE:
|
||||
// In Oracle, constraints are listed in the USER_CONS_COLUMNS table
|
||||
@@ -72,35 +75,46 @@ public class MigrationUtils
|
||||
throw new SQLException("DBMS " + dbtype + " is unsupported in this migration.");
|
||||
}
|
||||
|
||||
// If we have a SQL query to run for the constraint name, then run it
|
||||
if (constraintNameSQL!=null)
|
||||
// Run the query to obtain the constraint name, passing it the parameters
|
||||
PreparedStatement statement = connection.prepareStatement(constraintNameSQL);
|
||||
statement.setString(1, DatabaseUtils.canonicalize(connection, tableName));
|
||||
statement.setString(2, DatabaseUtils.canonicalize(connection, columnName));
|
||||
// Also limit by database schema, if a schemaName has been set (only needed for PostgreSQL)
|
||||
if(schemaName!=null && !schemaName.isEmpty())
|
||||
{
|
||||
// Run the query to obtain the constraint name, passing it the parameters
|
||||
PreparedStatement statement = connection.prepareStatement(constraintNameSQL);
|
||||
statement.setString(1, StringUtils.upperCase(tableName));
|
||||
statement.setString(2, StringUtils.upperCase(columnName));
|
||||
try
|
||||
statement.setString(3, DatabaseUtils.canonicalize(connection, schemaName));
|
||||
}
|
||||
try
|
||||
{
|
||||
ResultSet results = statement.executeQuery();
|
||||
if(results.next())
|
||||
{
|
||||
ResultSet results = statement.executeQuery();
|
||||
if(results.next())
|
||||
{
|
||||
constraintName = results.getString("CONSTRAINT_NAME");
|
||||
}
|
||||
results.close();
|
||||
}
|
||||
finally
|
||||
{
|
||||
statement.close();
|
||||
constraintName = results.getString("CONSTRAINT_NAME");
|
||||
}
|
||||
results.close();
|
||||
}
|
||||
finally
|
||||
{
|
||||
statement.close();
|
||||
}
|
||||
|
||||
// As long as we have a constraint name, drop it
|
||||
if (constraintName!=null && !constraintName.isEmpty())
|
||||
{
|
||||
// This drop constraint SQL should be the same in all databases
|
||||
String dropConstraintSQL = "ALTER TABLE " + tableName + " DROP CONSTRAINT " + constraintName;
|
||||
// Canonicalize the constraintName
|
||||
constraintName = DatabaseUtils.canonicalize(connection, constraintName);
|
||||
// If constraintName starts with a $, surround with double quotes
|
||||
// (This is mostly for PostgreSQL, which sometimes names constraints $1, $2, etc)
|
||||
if(constraintName.startsWith("$"))
|
||||
{
|
||||
constraintName = "\"" + constraintName + "\"";
|
||||
}
|
||||
|
||||
PreparedStatement statement = connection.prepareStatement(dropConstraintSQL);
|
||||
// This drop constraint SQL should be the same in all databases
|
||||
String dropConstraintSQL = "ALTER TABLE " + DatabaseUtils.canonicalize(connection, tableName) +
|
||||
" DROP CONSTRAINT " + constraintName;
|
||||
|
||||
statement = connection.prepareStatement(dropConstraintSQL);
|
||||
try
|
||||
{
|
||||
statement.execute();
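Stand-alone JDBC sketch of the PostgreSQL constraint lookup this hunk adds: the constraint name is now read from information_schema.key_column_usage, scoped to the current schema, instead of being derived from a naming convention. Connection handling and the example parameter values are illustrative assumptions; the SQL string is the one used in the migration.

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class ConstraintLookupSketch
    {
        public static String findConstraintName(Connection conn, String table, String column, String schema)
                throws SQLException
        {
            String sql = "SELECT DISTINCT CONSTRAINT_NAME "
                    + "FROM information_schema.key_column_usage "
                    + "WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ?";
            try (PreparedStatement stmt = conn.prepareStatement(sql))
            {
                stmt.setString(1, table);   // e.g. "metadatavalue"
                stmt.setString(2, column);  // e.g. "item_id"
                stmt.setString(3, schema);  // e.g. "public"
                try (ResultSet rs = stmt.executeQuery())
                {
                    return rs.next() ? rs.getString("CONSTRAINT_NAME") : null;
                }
            }
        }
    }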
|
||||
|
@@ -13,8 +13,6 @@ import java.sql.SQLException;
|
||||
import org.dspace.storage.rdbms.MigrationUtils;
|
||||
import org.flywaydb.core.api.migration.MigrationChecksumProvider;
|
||||
import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is in support of the "V1.4__Upgrade_to_DSpace_1.4_schema.sql"
|
||||
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
|
||||
public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
|
||||
implements JdbcMigration, MigrationChecksumProvider
|
||||
{
|
||||
/** logging category */
|
||||
private static final Logger log = LoggerFactory.getLogger(V1_3_9__Drop_constraint_for_DSpace_1_4_schema.class);
|
||||
|
||||
/* The checksum to report for this migration (when successful) */
|
||||
private int checksum = -1;
|
||||
|
||||
@@ -57,7 +52,7 @@ public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
|
||||
throws IOException, SQLException
|
||||
{
|
||||
// Drop the constraint associated with "name" column of "community"
|
||||
checksum = MigrationUtils.dropDBConstraint(connection, "community", "name", "key");
|
||||
checksum = MigrationUtils.dropDBConstraint(connection, "community", "name");
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -13,8 +13,6 @@ import java.sql.SQLException;
|
||||
import org.dspace.storage.rdbms.MigrationUtils;
|
||||
import org.flywaydb.core.api.migration.MigrationChecksumProvider;
|
||||
import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is in support of the "V1.6__Upgrade_to_DSpace_1.6_schema.sql"
|
||||
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
|
||||
public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
|
||||
implements JdbcMigration, MigrationChecksumProvider
|
||||
{
|
||||
/** logging category */
|
||||
private static final Logger log = LoggerFactory.getLogger(V1_5_9__Drop_constraint_for_DSpace_1_6_schema.class);
|
||||
|
||||
/* The checksum to report for this migration (when successful) */
|
||||
private int checksum = -1;
|
||||
|
||||
@@ -57,11 +52,11 @@ public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
|
||||
throws IOException, SQLException
|
||||
{
|
||||
// Drop the constraint associated with "collection_id" column of "community2collection" table
|
||||
int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id", "fkey");
|
||||
int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id");
|
||||
// Drop the constraint associated with "child_comm_id" column of "community2community" table
|
||||
int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id", "fkey");
|
||||
int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id");
|
||||
// Drop the constraint associated with "item_id" column of "collection2item" table
|
||||
int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id", "fkey");
|
||||
int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id");
|
||||
|
||||
// Checksum will just be the sum of those three return values
|
||||
checksum = return1 + return2 + return3;
|
||||
|
@@ -13,8 +13,6 @@ import java.sql.SQLException;
|
||||
import org.dspace.storage.rdbms.MigrationUtils;
|
||||
import org.flywaydb.core.api.migration.MigrationChecksumProvider;
|
||||
import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* This class is in support of the DS-1582 Metadata for All Objects feature.
|
||||
@@ -37,30 +35,27 @@ import org.slf4j.LoggerFactory;
|
||||
* <P>
|
||||
* This class represents a Flyway DB Java Migration
|
||||
* http://flywaydb.org/documentation/migration/java.html
|
||||
*
|
||||
*
|
||||
* @author Tim Donohue
|
||||
*/
|
||||
public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint
|
||||
implements JdbcMigration, MigrationChecksumProvider
|
||||
{
|
||||
/** logging category */
|
||||
private static final Logger log = LoggerFactory.getLogger(V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.class);
|
||||
|
||||
/* The checksum to report for this migration (when successful) */
|
||||
private int checksum = -1;
|
||||
|
||||
|
||||
/**
|
||||
* Actually migrate the existing database
|
||||
* @param connection
|
||||
* @param connection
|
||||
*/
|
||||
@Override
|
||||
public void migrate(Connection connection)
|
||||
throws IOException, SQLException
|
||||
{
|
||||
// Drop the constraint associated with "item_id" column of "metadatavalue"
|
||||
checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id", "fkey");
|
||||
checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id");
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return the checksum to be associated with this Migration
|
||||
* in the Flyway database table (schema_version).
|
||||
|
@@ -98,7 +98,7 @@ public class PubmedService
|
||||
|
||||
public List<Record> search(String query) throws IOException, HttpException
|
||||
{
|
||||
List<Record> results = null;
|
||||
List<Record> results = new ArrayList<>();
|
||||
if (!ConfigurationManager.getBooleanProperty(SubmissionLookupService.CFG_MODULE, "remoteservice.demo"))
|
||||
{
|
||||
HttpGet method = null;
|
||||
|
@@ -7,31 +7,28 @@
|
||||
*/
|
||||
package org.dspace.submit.step;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Enumeration;
|
||||
import java.util.Map;
|
||||
import java.util.HashMap;
|
||||
|
||||
import javax.servlet.http.HttpSession;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import org.dspace.app.util.SubmissionInfo;
|
||||
import org.dspace.app.util.Util;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Metadatum;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.license.CreativeCommons;
|
||||
import org.dspace.license.CCLookup;
|
||||
import org.dspace.submit.AbstractProcessingStep;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import javax.servlet.http.HttpSession;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.app.util.SubmissionInfo;
|
||||
import org.dspace.app.util.Util;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.license.CCLookup;
|
||||
import org.dspace.license.CreativeCommons;
|
||||
import org.dspace.submit.AbstractProcessingStep;
|
||||
|
||||
/**
|
||||
* CCLicense step for DSpace Submission Process.
|
||||
@@ -100,11 +97,6 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
session.setAttribute("inProgress", "TRUE");
|
||||
// check what submit button was pressed in User Interface
|
||||
String buttonPressed = Util.getSubmitButton(request, NEXT_BUTTON);
|
||||
if ("submit_grant".equalsIgnoreCase(buttonPressed)
|
||||
|| "submit_no_cc".equalsIgnoreCase(buttonPressed))
|
||||
{
|
||||
return processCC(context, request, response, subInfo);
|
||||
}
|
||||
String choiceButton = Util.getSubmitButton(request, SELECT_CHANGE);
|
||||
Enumeration e = request.getParameterNames();
|
||||
String isFieldRequired = "FALSE";
|
||||
@@ -122,27 +114,10 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
{
|
||||
Item item = subInfo.getSubmissionItem().getItem();
|
||||
CreativeCommons.MdField uriField = CreativeCommons.getCCField("uri");
|
||||
CreativeCommons.MdField nameField = CreativeCommons.getCCField("name");
|
||||
String licenseUri = uriField.ccItemValue(item);
|
||||
if (licenseUri != null)
|
||||
//if (CreativeCommons.hasLicense(item, "dc", "rights", "uri", Item.ANY)
|
||||
// && !CreativeCommons.getRightsURI(item, "dc", "rights", "uri", Item.ANY).equals(""))
|
||||
{
|
||||
//CreativeCommons.setItemMetadata(item, licenseURI, "dc", "rights", "uri", ConfigurationManager.getProperty("default.locale"));
|
||||
uriField.removeItemValue(item, licenseUri);
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.setname"))
|
||||
{
|
||||
String licenseName = nameField.keyedItemValue(item, licenseUri);
|
||||
nameField.removeItemValue(item, licenseName);
|
||||
//CreativeCommons.setItemMetadata(item, CreativeCommons.getRightsName(item, "dc", "rights", null, Item.ANY), "dc", "rights", null, ConfigurationManager.getProperty("default.locale"));
|
||||
}
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.addBitstream"))
|
||||
{
|
||||
CreativeCommons.removeLicense(context, item);
|
||||
}
|
||||
removeRequiredAttributes(session);
|
||||
item.update();
|
||||
context.commit();
|
||||
}
|
||||
return STATUS_COMPLETE;
|
||||
}
|
||||
@@ -152,7 +127,7 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
}
|
||||
if (buttonPressed.equals(NEXT_BUTTON) || buttonPressed.equals(CANCEL_BUTTON) )
|
||||
{
|
||||
return processCCWS(context, request, response, subInfo);
|
||||
return processCC(context, request, response, subInfo);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -162,54 +137,6 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process the input from the CC license page
|
||||
*
|
||||
* @param context
|
||||
* current DSpace context
|
||||
* @param request
|
||||
* current servlet request object
|
||||
* @param response
|
||||
* current servlet response object
|
||||
* @param subInfo
|
||||
* submission info object
|
||||
*
|
||||
* @return Status or error flag which will be processed by
|
||||
* doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
|
||||
* no errors occurred!)
|
||||
*/
|
||||
protected int processCC(Context context, HttpServletRequest request,
|
||||
HttpServletResponse response, SubmissionInfo subInfo)
|
||||
throws ServletException, IOException, SQLException,
|
||||
AuthorizeException
|
||||
{
|
||||
String buttonPressed = Util.getSubmitButton(request, NEXT_BUTTON);
|
||||
|
||||
// RLR hack - need to distinguish between progress bar real submission
|
||||
// (if cc_license_url exists, then users has accepted the CC License)
|
||||
String ccLicenseUrl = request.getParameter("cc_license_url");
|
||||
|
||||
if (buttonPressed.equals("submit_no_cc"))
|
||||
{
|
||||
// Skipping the CC license - remove any existing license selection
|
||||
CreativeCommons.removeLicense(context, subInfo.getSubmissionItem()
|
||||
.getItem());
|
||||
}
|
||||
else if ((ccLicenseUrl != null) && (ccLicenseUrl.length() > 0))
|
||||
{
|
||||
Item item = subInfo.getSubmissionItem().getItem();
|
||||
|
||||
// save the CC license
|
||||
CreativeCommons.setLicense(context, item, ccLicenseUrl);
|
||||
}
|
||||
|
||||
// commit changes
|
||||
context.commit();
|
||||
|
||||
// completed without errors
|
||||
return STATUS_COMPLETE;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Process the input from the CC license page using CC Web service
|
||||
@@ -228,12 +155,11 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
* doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
|
||||
* no errors occurred!)
|
||||
*/
|
||||
protected int processCCWS(Context context, HttpServletRequest request,
|
||||
protected int processCC(Context context, HttpServletRequest request,
|
||||
HttpServletResponse response, SubmissionInfo subInfo)
|
||||
throws ServletException, IOException, SQLException,
|
||||
AuthorizeException {
|
||||
|
||||
String ccLicenseUrl = request.getParameter("cc_license_url");
|
||||
HttpSession session = request.getSession();
|
||||
Map<String, String> map = new HashMap<String, String>();
|
||||
String licenseclass = (request.getParameter("licenseclass_chooser") != null) ? request.getParameter("licenseclass_chooser") : "";
|
||||
@@ -245,39 +171,32 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
map.put("sampling", request.getParameter("sampling_chooser"));
|
||||
}
|
||||
map.put("jurisdiction", jurisdiction);
|
||||
CCLookup ccLookup = new CCLookup();
|
||||
|
||||
CreativeCommons.MdField uriField = CreativeCommons.getCCField("uri");
|
||||
CreativeCommons.MdField nameField = CreativeCommons.getCCField("name");
|
||||
ccLookup.issue(licenseclass, map, ConfigurationManager.getProperty("cc.license.locale"));
|
||||
Item item = subInfo.getSubmissionItem().getItem();
|
||||
if (licenseclass.equals("xmlui.Submission.submit.CCLicenseStep.no_license"))
|
||||
if ("webui.Submission.submit.CCLicenseStep.no_license".equals(licenseclass) || "xmlui.Submission.submit.CCLicenseStep.no_license".equals(licenseclass))
|
||||
{
|
||||
// only remove any previous licenses
|
||||
String licenseUri = uriField.ccItemValue(item);
|
||||
if (licenseUri != null) {
|
||||
uriField.removeItemValue(item, licenseUri);
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.setname"))
|
||||
{
|
||||
String licenseName = nameField.keyedItemValue(item, licenseUri);
|
||||
nameField.removeItemValue(item, licenseName);
|
||||
}
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.addBitstream"))
|
||||
{
|
||||
CreativeCommons.removeLicense(context, item);
|
||||
}
|
||||
item.update();
|
||||
context.commit();
|
||||
removeRequiredAttributes(session);
|
||||
}
|
||||
CreativeCommons.removeLicense(context, uriField, nameField, item);
|
||||
|
||||
item.update();
|
||||
context.commit();
|
||||
removeRequiredAttributes(session);
|
||||
|
||||
return STATUS_COMPLETE;
|
||||
}
|
||||
else if (licenseclass.equals("xmlui.Submission.submit.CCLicenseStep.select_change"))
|
||||
else if (StringUtils.isBlank(licenseclass) || "webui.Submission.submit.CCLicenseStep.select_change".equals(licenseclass) || "xmlui.Submission.submit.CCLicenseStep.select_change".equals(licenseclass))
|
||||
{
|
||||
removeRequiredAttributes(session);
|
||||
return STATUS_COMPLETE;
|
||||
}
|
||||
else if (ccLookup.isSuccess())
|
||||
|
||||
CCLookup ccLookup = new CCLookup();
|
||||
ccLookup.issue(licenseclass, map, ConfigurationManager.getProperty("cc.license.locale"));
|
||||
if (ccLookup.isSuccess())
|
||||
{
|
||||
CreativeCommons.removeLicense(context, uriField, nameField, item);
|
||||
|
||||
uriField.addItemValue(item, ccLookup.getLicenseUrl());
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.addbitstream")) {
|
||||
CreativeCommons.setLicenseRDF(context, item, ccLookup.getRdf());
|
||||
@@ -285,6 +204,7 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.setname")) {
|
||||
nameField.addItemValue(item, ccLookup.getLicenseName());
|
||||
}
|
||||
|
||||
item.update();
|
||||
context.commit();
|
||||
removeRequiredAttributes(session);
|
||||
@@ -302,7 +222,8 @@ public class CCLicenseStep extends AbstractProcessingStep
|
||||
}
|
||||
return STATUS_COMPLETE;
|
||||
}
|
||||
|
||||
|
||||
|
||||
private void removeRequiredAttributes(HttpSession session) {
|
||||
session.removeAttribute("ccError");
|
||||
session.removeAttribute("isFieldRequired");
|
||||
|
@@ -11,13 +11,16 @@ import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import org.dspace.app.util.SubmissionInfo;
|
||||
import org.dspace.app.util.Util;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
@@ -26,8 +29,8 @@ import org.dspace.content.BitstreamFormat;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.FormatIdentifier;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.curate.Curator;
|
||||
import org.dspace.submit.AbstractProcessingStep;
|
||||
|
||||
@@ -261,6 +264,44 @@ public class UploadStep extends AbstractProcessingStep
|
||||
// -------------------------------------------------
|
||||
// Step #3: Check for a change in file description
|
||||
// -------------------------------------------------
|
||||
// We have to check for descriptions from users using the resumable upload
|
||||
// and from users using the simple upload.
|
||||
// Beginning with the resumable ones.
|
||||
Enumeration<String> parameterNames = request.getParameterNames();
|
||||
Map<String, String> descriptions = new HashMap<String, String>();
|
||||
while (parameterNames.hasMoreElements())
|
||||
{
|
||||
String name = parameterNames.nextElement();
|
||||
if (StringUtils.startsWithIgnoreCase(name, "description["))
|
||||
{
|
||||
descriptions.put(
|
||||
name.substring("description[".length(), name.length()-1),
|
||||
request.getParameter(name));
|
||||
}
|
||||
}
|
||||
if (!descriptions.isEmpty())
|
||||
{
|
||||
// we got descriptions from the resumable upload
|
||||
if (item != null)
|
||||
{
|
||||
Bundle[] bundles = item.getBundles("ORIGINAL");
|
||||
for (Bundle bundle : bundles)
|
||||
{
|
||||
Bitstream[] bitstreams = bundle.getBitstreams();
|
||||
for (Bitstream bitstream : bitstreams)
|
||||
{
|
||||
if (descriptions.containsKey(bitstream.getName()))
|
||||
{
|
||||
bitstream.setDescription(descriptions.get(bitstream.getName()));
|
||||
bitstream.update();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return STATUS_COMPLETE;
|
||||
}
|
||||
|
||||
// Going on with descriptions from the simple upload
|
||||
String fileDescription = request.getParameter("description");
|
||||
|
||||
if (fileDescription != null && fileDescription.length() > 0)
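A quick illustration (assumed request values, not from this diff) of the two description sources this step now reconciles: the resumable uploader posts one parameter per file, named "description[<bitstream name>]", while the simple upload form posts a single "description" parameter.

    import java.util.HashMap;
    import java.util.Map;

    public class DescriptionParamsSketch
    {
        public static void main(String[] args)
        {
            Map<String, String> requestParams = new HashMap<String, String>();
            requestParams.put("description[thesis.pdf]", "Full text");       // resumable upload
            requestParams.put("description[appendix.zip]", "Data appendix"); // resumable upload
            requestParams.put("description", "Only used by the simple upload form");

            for (Map.Entry<String, String> e : requestParams.entrySet())
            {
                if (e.getKey().startsWith("description[") && e.getKey().endsWith("]"))
                {
                    // strip the "description[" prefix and trailing "]" to get the bitstream name
                    String bitstreamName = e.getKey().substring("description[".length(), e.getKey().length() - 1);
                    System.out.println(bitstreamName + " -> " + e.getValue());
                }
            }
        }
    }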
|
||||
|
@@ -10,6 +10,11 @@ package org.dspace.submit.step;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Date;
|
||||
import java.util.Enumeration;
|
||||
|
||||
@@ -18,6 +23,7 @@ import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.time.DateUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.app.util.SubmissionInfo;
|
||||
import org.dspace.app.util.Util;
|
||||
@@ -229,6 +235,44 @@ public class UploadWithEmbargoStep extends UploadStep
|
||||
// -------------------------------------------------
|
||||
// Step #3: Check for a change in file description
|
||||
// -------------------------------------------------
|
||||
// We have to check for descriptions from users using the resumable upload
|
||||
// and from users using the simple upload.
|
||||
// Beginning with the resumable ones.
|
||||
Enumeration<String> parameterNames = request.getParameterNames();
|
||||
Map<String, String> descriptions = new HashMap<String, String>();
|
||||
while (parameterNames.hasMoreElements())
|
||||
{
|
||||
String name = parameterNames.nextElement();
|
||||
if (StringUtils.startsWithIgnoreCase(name, "description["))
|
||||
{
|
||||
descriptions.put(
|
||||
name.substring("description[".length(), name.length()-1),
|
||||
request.getParameter(name));
|
||||
}
|
||||
}
|
||||
if (!descriptions.isEmpty())
|
||||
{
|
||||
// we got descriptions from the resumable upload
|
||||
if (item != null)
|
||||
{
|
||||
Bundle[] bundles = item.getBundles("ORIGINAL");
|
||||
for (Bundle bundle : bundles)
|
||||
{
|
||||
Bitstream[] bitstreams = bundle.getBitstreams();
|
||||
for (Bitstream bitstream : bitstreams)
|
||||
{
|
||||
if (descriptions.containsKey(bitstream.getName()))
|
||||
{
|
||||
bitstream.setDescription(descriptions.get(bitstream.getName()));
|
||||
bitstream.update();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return STATUS_COMPLETE;
|
||||
}
|
||||
|
||||
// Going on with descriptions from the simple upload
|
||||
String fileDescription = request.getParameter("description");
|
||||
|
||||
if (fileDescription != null && fileDescription.length() > 0)
|
||||
@@ -352,7 +396,7 @@ public class UploadWithEmbargoStep extends UploadStep
|
||||
String fileDescription = (String) request.getAttribute(param + "-description");
|
||||
if(fileDescription==null ||fileDescription.length()==0)
|
||||
{
|
||||
request.getParameter("description");
|
||||
fileDescription = request.getParameter("description");
|
||||
}
|
||||
|
||||
// if information wasn't passed by User Interface, we had a problem
|
||||
|
dspace-api/src/main/java/org/dspace/util/SolrImportExport.java (new file, 719 lines)
@@ -0,0 +1,719 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.util;
|
||||
|
||||
import org.apache.commons.cli.*;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.apache.solr.client.solrj.impl.HttpSolrServer;
|
||||
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
|
||||
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
|
||||
import org.apache.solr.client.solrj.request.CoreAdminRequest;
|
||||
import org.apache.solr.client.solrj.request.LukeRequest;
|
||||
import org.apache.solr.client.solrj.response.CoreAdminResponse;
|
||||
import org.apache.solr.client.solrj.response.FieldStatsInfo;
|
||||
import org.apache.solr.client.solrj.response.LukeResponse;
|
||||
import org.apache.solr.client.solrj.response.RangeFacet;
|
||||
import org.apache.solr.common.luke.FieldFlag;
|
||||
import org.apache.solr.common.params.CoreAdminParams;
|
||||
import org.apache.solr.common.params.FacetParams;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FilenameFilter;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.FileStore;
|
||||
import java.text.*;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* Utility class to export, clear and import Solr indexes.
|
||||
* @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
|
||||
*/
|
||||
public class SolrImportExport
|
||||
{
|
||||
|
||||
private static final DateFormat SOLR_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
|
||||
private static final DateFormat SOLR_DATE_FORMAT_NO_MS = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
|
||||
private static final DateFormat EXPORT_DATE_FORMAT = new SimpleDateFormat("yyyy-MM");
|
||||
|
||||
static
|
||||
{
|
||||
SOLR_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
|
||||
EXPORT_DATE_FORMAT.setTimeZone(TimeZone.getDefault());
|
||||
}
|
||||
|
||||
private static final String ACTION_OPTION = "a";
|
||||
private static final String CLEAR_OPTION = "c";
|
||||
private static final String DIRECTORY_OPTION = "d";
|
||||
private static final String HELP_OPTION = "h";
|
||||
private static final String INDEX_NAME_OPTION = "i";
|
||||
private static final String KEEP_OPTION = "k";
|
||||
private static final String LAST_OPTION = "l";
|
||||
|
||||
public static final int ROWS_PER_FILE = 10_000;
|
||||
|
||||
private static final Logger log = Logger.getLogger(SolrImportExport.class);
|
||||
|
||||
/**
|
||||
* Entry point for command-line invocation
|
||||
* @param args command-line arguments; see help for description
|
||||
* @throws ParseException if the command-line arguments cannot be parsed
|
||||
*/
|
||||
public static void main(String[] args) throws ParseException
|
||||
{
|
||||
CommandLineParser parser = new PosixParser();
|
||||
Options options = makeOptions();
|
||||
|
||||
try
|
||||
{
|
||||
CommandLine line = parser.parse(options, args);
|
||||
if (line.hasOption(HELP_OPTION))
|
||||
{
|
||||
printHelpAndExit(options, 0);
|
||||
}
|
||||
|
||||
if (!line.hasOption(INDEX_NAME_OPTION))
|
||||
{
|
||||
System.err.println("This command requires the index-name option but none was present.");
|
||||
printHelpAndExit(options, 1);
|
||||
}
|
||||
String[] indexNames = line.getOptionValues(INDEX_NAME_OPTION);
|
||||
|
||||
String directoryName = makeDirectoryName(line.getOptionValue(DIRECTORY_OPTION));
|
||||
|
||||
String action = line.getOptionValue(ACTION_OPTION, "export");
|
||||
if ("import".equals(action))
|
||||
{
|
||||
for (String indexName : indexNames)
|
||||
{
|
||||
File importDir = new File(directoryName);
|
||||
if (!importDir.exists() || !importDir.canRead())
|
||||
{
|
||||
System.err.println("Import directory " + directoryName
|
||||
+ " doesn't exist or is not readable by the current user. Not importing index "
|
||||
+ indexName);
|
||||
continue; // skip this index
|
||||
}
|
||||
try
|
||||
{
|
||||
String solrUrl = makeSolrUrl(indexName);
|
||||
boolean clear = line.hasOption(CLEAR_OPTION);
|
||||
importIndex(indexName, importDir, solrUrl, clear, clear);
|
||||
}
|
||||
catch (IOException | SolrServerException | SolrImportExportException e)
|
||||
{
|
||||
System.err.println("Problem encountered while trying to import index " + indexName + ".");
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if ("export".equals(action))
|
||||
{
|
||||
for (String indexName : indexNames)
|
||||
{
|
||||
String lastValue = line.getOptionValue(LAST_OPTION);
|
||||
File exportDir = new File(directoryName);
|
||||
if (exportDir.exists() && !exportDir.canWrite())
|
||||
{
|
||||
System.err.println("Export directory " + directoryName
|
||||
+ " is not writable by the current user. Not exporting index "
|
||||
+ indexName);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!exportDir.exists())
|
||||
{
|
||||
boolean created = exportDir.mkdirs();
|
||||
if (!created)
|
||||
{
|
||||
System.err.println("Export directory " + directoryName
|
||||
+ " could not be created. Not exporting index " + indexName);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
String solrUrl = makeSolrUrl(indexName);
|
||||
String timeField = makeTimeField(indexName);
|
||||
exportIndex(indexName, exportDir, solrUrl, timeField, lastValue);
|
||||
}
|
||||
catch (SolrServerException | IOException | SolrImportExportException e)
|
||||
{
|
||||
System.err.println("Problem encountered while trying to export index " + indexName + ".");
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if ("reindex".equals(action))
|
||||
{
|
||||
for (String indexName : indexNames)
|
||||
{
|
||||
try {
|
||||
boolean keepExport = line.hasOption(KEEP_OPTION);
|
||||
reindex(indexName, directoryName, keepExport);
|
||||
} catch (IOException | SolrServerException | SolrImportExportException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
System.err.println("Unknown action " + action + "; must be import, export or reindex.");
|
||||
printHelpAndExit(options, 1);
|
||||
}
|
||||
}
|
||||
catch (ParseException e)
|
||||
{
|
||||
System.err.println("Cannot read command options");
|
||||
printHelpAndExit(options, 1);
|
||||
}
|
||||
}
|
||||
|
||||
private static Options makeOptions() {
|
||||
Options options = new Options();
|
||||
options.addOption(ACTION_OPTION, "action", true, "The action to perform: import, export or reindex. Default: export.");
|
||||
options.addOption(CLEAR_OPTION, "clear", false, "When importing, also clear the index first. Ignored when action is export or reindex.");
|
||||
options.addOption(DIRECTORY_OPTION, "directory", true,
|
||||
"The absolute path for the directory to use for import or export. If omitted, [dspace]/solr-export is used.");
|
||||
options.addOption(HELP_OPTION, "help", false, "Get help on options for this command.");
|
||||
options.addOption(INDEX_NAME_OPTION, "index-name", true,
|
||||
"The names of the indexes to process. At least one is required. Available indexes are: authority, statistics.");
|
||||
options.addOption(KEEP_OPTION, "keep", false, "When reindexing, keep the contents of the data export directory." +
|
||||
" By default, the contents of this directory will be deleted once the reindex has finished." +
|
||||
" Ignored when action is export or import.");
|
||||
options.addOption(LAST_OPTION, "last", true, "When exporting, export records from the last [timeperiod] only." +
|
||||
" This can be one of: 'd' (beginning of yesterday through to now);" +
|
||||
" 'm' (beginning of the previous month through to end of the previous month);" +
|
||||
" a number, in which case the last [number] of days are exported, through to now (use 0 for today's data)." +
|
||||
" Date calculation is done in UTC. If omitted, all documents are exported.");
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reindexes the specified core
|
||||
*
|
||||
* @param indexName the name of the core to reindex
|
||||
* @param exportDirName the name of the directory to use for export. If this directory doesn't exist, it will be created.
|
||||
* @param keepExport whether to keep the contents of the exportDir after the reindex. If keepExport is false and the
|
||||
* export directory was created by this method, the export directory will be deleted at the end of the reimport.
|
||||
*/
|
||||
private static void reindex(String indexName, String exportDirName, boolean keepExport)
|
||||
throws IOException, SolrServerException, SolrImportExportException {
|
||||
String tempIndexName = indexName + "-temp";
|
||||
|
||||
String origSolrUrl = makeSolrUrl(indexName);
|
||||
String baseSolrUrl = StringUtils.substringBeforeLast(origSolrUrl, "/"); // need to get non-core solr URL
|
||||
String tempSolrUrl = baseSolrUrl + "/" + tempIndexName;
|
||||
|
||||
String solrInstanceDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" + File.separator + indexName;
|
||||
// the [dspace]/solr/[indexName]/conf directory needs to be available on the local machine for this to work
|
||||
// -- we need access to the schema.xml and solrconfig.xml file, plus files referenced from there
|
||||
// if this directory can't be found, output an error message and skip this index
|
||||
File solrInstance = new File(solrInstanceDir);
|
||||
if (!solrInstance.exists() || !solrInstance.canRead() || !solrInstance.isDirectory())
|
||||
{
|
||||
throw new SolrImportExportException("Directory " + solrInstanceDir + "/conf/ doesn't exist or isn't readable." +
|
||||
" The reindexing process requires the Solr configuration directory for this index to be present on the local machine" +
|
||||
" even if Solr is running on a different host. Not reindexing index " + indexName);
|
||||
}
|
||||
|
||||
String timeField = makeTimeField(indexName);
|
||||
|
||||
// Ensure the export directory exists and is writable
|
||||
File exportDir = new File(exportDirName);
|
||||
boolean createdExportDir = exportDir.mkdirs();
|
||||
if (!createdExportDir && !exportDir.exists())
|
||||
{
|
||||
throw new SolrImportExportException("Could not create export directory " + exportDirName);
|
||||
}
|
||||
if (!exportDir.canWrite())
|
||||
{
|
||||
throw new SolrImportExportException("Can't write to export directory " + exportDirName);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
HttpSolrServer adminSolr = new HttpSolrServer(baseSolrUrl);
|
||||
|
||||
// try to find out size of core and compare with free space in export directory
|
||||
CoreAdminResponse status = CoreAdminRequest.getStatus(indexName, adminSolr);
|
||||
Object coreSizeObj = status.getCoreStatus(indexName).get("sizeInBytes");
|
||||
long coreSize = coreSizeObj != null ? Long.valueOf(coreSizeObj.toString()) : -1;
|
||||
long usableExportSpace = exportDir.getUsableSpace();
|
||||
if (coreSize >= 0 && usableExportSpace < coreSize)
|
||||
{
|
||||
System.err.println("Not enough space in export directory " + exportDirName
|
||||
+ "; need at least as much space as the index ("
|
||||
+ FileUtils.byteCountToDisplaySize(coreSize)
|
||||
+ ") but usable space in export directory is only "
|
||||
+ FileUtils.byteCountToDisplaySize(usableExportSpace)
|
||||
+ ". Not continuing with reindex, please use the " + DIRECTORY_OPTION
|
||||
+ " option to specify an alternative export directy with sufficient space.");
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a temp directory to store temporary core data
|
||||
File tempDataDir = new File(ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator + "solr-data");
|
||||
boolean createdTempDataDir = tempDataDir.mkdirs();
|
||||
if (!createdTempDataDir && !tempDataDir.exists())
|
||||
{
|
||||
throw new SolrImportExportException("Could not create temporary data directory " + tempDataDir.getCanonicalPath());
|
||||
}
|
||||
if (!tempDataDir.canWrite())
|
||||
{
|
||||
throw new SolrImportExportException("Can't write to temporary data directory " + tempDataDir.getCanonicalPath());
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
// create a temporary core to hold documents coming in during the reindex
|
||||
CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create();
|
||||
createRequest.setInstanceDir(solrInstanceDir);
|
||||
createRequest.setDataDir(tempDataDir.getCanonicalPath());
|
||||
createRequest.setCoreName(tempIndexName);
|
||||
|
||||
createRequest.process(adminSolr).getStatus();
|
||||
}
|
||||
catch (SolrServerException e)
|
||||
{
|
||||
// try to continue -- it may just be that the core already existed from a previous, failed attempt
|
||||
System.err.println("Caught exception when trying to create temporary core: " + e.getMessage() + "; trying to recover.");
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
|
||||
// swap actual core with temporary one
|
||||
CoreAdminRequest swapRequest = new CoreAdminRequest();
|
||||
swapRequest.setCoreName(indexName);
|
||||
swapRequest.setOtherCoreName(tempIndexName);
|
||||
swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
|
||||
swapRequest.process(adminSolr);
|
||||
|
||||
try
|
||||
{
|
||||
// export from the actual core (from temp core name, actual data dir)
|
||||
exportIndex(indexName, exportDir, tempSolrUrl, timeField);
|
||||
|
||||
// clear actual core (temp core name, clearing actual data dir) & import
|
||||
importIndex(indexName, exportDir, tempSolrUrl, true, true);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// we ran into some problems with the export/import -- keep going to try and restore the solr cores
|
||||
System.err.println("Encountered problem during reindex: " + e.getMessage() + ", will attempt to restore Solr cores");
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
|
||||
// commit changes
|
||||
HttpSolrServer origSolr = new HttpSolrServer(origSolrUrl);
|
||||
origSolr.commit();
|
||||
|
||||
// swap back (statistics now going to actual core name in actual data dir)
|
||||
swapRequest = new CoreAdminRequest();
|
||||
swapRequest.setCoreName(tempIndexName);
|
||||
swapRequest.setOtherCoreName(indexName);
|
||||
swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
|
||||
swapRequest.process(adminSolr);
|
||||
|
||||
// export all docs from now-temp core into export directory -- this won't cause name collisions with the actual export
|
||||
// because the core name for the temporary export has -temp in it while the actual core doesn't
|
||||
exportIndex(tempIndexName, exportDir, tempSolrUrl, timeField);
|
||||
// ...and import them into the now-again-actual core *without* clearing
|
||||
importIndex(tempIndexName, exportDir, origSolrUrl, false, true);
|
||||
|
||||
// commit changes
|
||||
origSolr.commit();
|
||||
|
||||
// unload now-temp core (temp core name)
|
||||
CoreAdminRequest.unloadCore(tempIndexName, false, false, adminSolr);
|
||||
|
||||
// clean up temporary data dir if this method created it
|
||||
if (createdTempDataDir && tempDataDir.exists())
|
||||
{
|
||||
FileUtils.deleteDirectory(tempDataDir);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
// clean up export dir if appropriate
|
||||
if (!keepExport && createdExportDir && exportDir.exists())
|
||||
{
|
||||
FileUtils.deleteDirectory(exportDir);
|
||||
}
|
||||
}
|
||||
}
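
To make the swap choreography above easier to follow, here is a condensed, hedged sketch of the same sequence against a local Solr. The base URL, core names, file paths and the org.dspace.util package for SolrImportExport are placeholders or assumptions, and the disk-space check, error handling and cleanup of the full method are deliberately omitted.

import java.io.File;

import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.common.params.CoreAdminParams;
import org.dspace.util.SolrImportExport;

public class ReindexSketch
{
    public static void main(String[] args) throws Exception
    {
        HttpSolrServer admin = new HttpSolrServer("http://localhost:8080/solr");
        File exportDir = new File("/dspace/solr-export");

        // 1. create a "-temp" core that shares the real core's config but uses a scratch data dir
        CoreAdminRequest.Create create = new CoreAdminRequest.Create();
        create.setCoreName("statistics-temp");
        create.setInstanceDir("/dspace/solr/statistics");
        create.setDataDir("/dspace/temp/solr-data");
        create.process(admin);

        // 2. swap: incoming writes now land in the scratch data dir under the real core name
        CoreAdminRequest swap = new CoreAdminRequest();
        swap.setCoreName("statistics");
        swap.setOtherCoreName("statistics-temp");
        swap.setAction(CoreAdminParams.CoreAdminAction.SWAP);
        swap.process(admin);

        // 3. export the old data (now served as statistics-temp) and re-import it with clear=true
        SolrImportExport.exportIndex("statistics", exportDir,
                "http://localhost:8080/solr/statistics-temp", "time");
        SolrImportExport.importIndex("statistics", exportDir,
                "http://localhost:8080/solr/statistics-temp", true, true);

        // 4. swap back, drain the documents gathered in the temp core, then unload it
        swap.process(admin); // SWAP is symmetric, so running the same request again restores the mapping
        SolrImportExport.exportIndex("statistics-temp", exportDir,
                "http://localhost:8080/solr/statistics-temp", "time");
        SolrImportExport.importIndex("statistics-temp", exportDir,
                "http://localhost:8080/solr/statistics", false, true);
        CoreAdminRequest.unloadCore("statistics-temp", false, false, admin);
    }
}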
|
||||
|
||||
/**
|
||||
* Exports all documents in the given index to the specified target directory in batches of #ROWS_PER_FILE.
|
||||
* See #makeExportFilename for the file names that are generated.
|
||||
*
|
||||
* @param indexName The index to export.
|
||||
* @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
|
||||
* @param solrUrl The solr URL for the index to export. Must not be null.
|
||||
* @param timeField The time field to use for sorting the export. Must not be null.
|
||||
* @throws SolrServerException if there is a problem with exporting the index.
|
||||
* @throws IOException if there is a problem creating the files or communicating with Solr.
|
||||
* @throws SolrImportExportException if there is a problem in communicating with Solr.
|
||||
*/
|
||||
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField)
|
||||
throws SolrServerException, SolrImportExportException, IOException {
|
||||
exportIndex(indexName, toDir, solrUrl, timeField, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Import previously exported documents (or externally created CSV files that have the appropriate structure) into the specified index.
|
||||
* @param indexName the index to import.
|
||||
* @param fromDir the source directory. Must exist and be readable.
|
||||
* The importer will look for files whose name starts with <pre>indexName</pre>
|
||||
* and ends with .csv (to match what is generated by #makeExportFilename).
|
||||
* @param solrUrl The solr URL for the index to export. Must not be null.
|
||||
* @param clear if true, clear the index before importing.
|
||||
* @param overwrite if true, skip _version_ field on import to disable Solr's optimistic concurrency functionality
|
||||
* @throws IOException if there is a problem reading the files or communicating with Solr.
|
||||
* @throws SolrServerException if there is a problem reading the files or communicating with Solr.
|
||||
* @throws SolrImportExportException if there is a problem communicating with Solr.
|
||||
*/
|
||||
public static void importIndex(final String indexName, File fromDir, String solrUrl, boolean clear, boolean overwrite)
|
||||
throws IOException, SolrServerException, SolrImportExportException
|
||||
{
|
||||
if (StringUtils.isBlank(solrUrl))
|
||||
{
|
||||
throw new SolrImportExportException("Could not construct solr URL for index " + indexName + ", aborting import.");
|
||||
}
|
||||
|
||||
if (!fromDir.exists() || !fromDir.canRead())
|
||||
{
|
||||
throw new SolrImportExportException("Source directory " + fromDir
|
||||
+ " doesn't exist or isn't readable, aborting export of index "
|
||||
+ indexName);
|
||||
}
|
||||
|
||||
HttpSolrServer solr = new HttpSolrServer(solrUrl);
|
||||
|
||||
// must get multivalue fields before clearing
|
||||
List<String> multivaluedFields = getMultiValuedFields(solr);
|
||||
|
||||
if (clear)
|
||||
{
|
||||
clearIndex(solrUrl);
|
||||
}
|
||||
|
||||
File[] files = fromDir.listFiles(new FilenameFilter()
|
||||
{
|
||||
@Override
|
||||
public boolean accept(File dir, String name)
|
||||
{
|
||||
return name.startsWith(indexName) && name.endsWith(".csv");
|
||||
}
|
||||
});
|
||||
|
||||
if (files == null || files.length == 0)
|
||||
{
|
||||
log.warn("No export files found in directory " + fromDir.getCanonicalPath() + " for index " + indexName);
|
||||
return;
|
||||
}
|
||||
|
||||
Arrays.sort(files);
|
||||
|
||||
for (File file : files)
|
||||
{
|
||||
log.info("Importing file " + file.getCanonicalPath());
|
||||
ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
|
||||
if (overwrite)
|
||||
{
|
||||
contentStreamUpdateRequest.setParam("skip", "_version_");
|
||||
}
|
||||
for (String mvField : multivaluedFields) {
|
||||
contentStreamUpdateRequest.setParam("f." + mvField + ".split", "true");
|
||||
contentStreamUpdateRequest.setParam("f." + mvField + ".escape", "\\");
|
||||
}
|
||||
contentStreamUpdateRequest.setParam("stream.contentType", "text/csv;charset=utf-8");
|
||||
contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
|
||||
contentStreamUpdateRequest.addFile(file, "text/csv;charset=utf-8");
|
||||
|
||||
solr.request(contentStreamUpdateRequest);
|
||||
}
|
||||
|
||||
solr.commit(true, true);
|
||||
}
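
A minimal usage sketch for the import entry point above; the core URL and source directory are illustrative, and the fully qualified class name is assumed to be org.dspace.util.SolrImportExport (matching the exception class later in this diff).

import java.io.File;

import org.dspace.util.SolrImportExport;

public class ImportSketch
{
    public static void main(String[] args) throws Exception
    {
        // Re-import previously exported statistics CSVs without wiping the core first
        // (clear = false); overwrite = true makes the import skip the _version_ column
        // so Solr's optimistic concurrency checks don't reject the rows.
        SolrImportExport.importIndex("statistics",
                new File("/dspace/solr-export"),
                "http://localhost:8080/solr/statistics",
                false, true);
    }
}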
|
||||
|
||||
/**
* Determine the names of all multi-valued fields from the data in the index.
* @param solr the solr server to query.
* @return A list containing all multi-valued fields, or an empty list if none are found / there aren't any.
*/
private static List<String> getMultiValuedFields(HttpSolrServer solr)
{
List<String> result = new ArrayList<>();
try
{
LukeRequest request = new LukeRequest();
// this needs to be a non-schema request, otherwise we'll miss dynamic fields
LukeResponse response = request.process(solr);
Map<String, LukeResponse.FieldInfo> fields = response.getFieldInfo();
for (LukeResponse.FieldInfo info : fields.values())
{
if (info.getSchema().contains(FieldFlag.MULTI_VALUED.getAbbreviation() + ""))
{
result.add(info.getName());
}
}
}
catch (IOException | SolrServerException e)
{
log.fatal("Cannot determine which fields are multi valued: " + e.getMessage(), e);
}
return result;
}
|
||||
|
||||
/**
* Remove all documents from the Solr index with the given URL, then commit and optimise the index.
*
* @param solrUrl URL of the Solr core to clear.
* @throws IOException if there is a problem in communicating with Solr.
* @throws SolrServerException if there is a problem in communicating with Solr.
*/
public static void clearIndex(String solrUrl) throws IOException, SolrServerException
{
HttpSolrServer solr = new HttpSolrServer(solrUrl);
solr.deleteByQuery("*:*");
solr.commit();
solr.optimize();
}
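
And a one-line usage sketch for clearIndex (core URL illustrative), for example when wiping a core before a full re-import.

import org.dspace.util.SolrImportExport;

public class ClearIndexSketch
{
    public static void main(String[] args) throws Exception
    {
        // Removes every document from the statistics core, then commits and optimises it.
        SolrImportExport.clearIndex("http://localhost:8080/solr/statistics");
    }
}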
|
||||
|
||||
/**
|
||||
* Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
|
||||
* See #makeExportFilename for the file names that are generated.
|
||||
*
|
||||
* @param indexName The index to export.
|
||||
* @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
|
||||
* @param solrUrl The solr URL for the index to export. Must not be null.
|
||||
* @param timeField The time field to use for sorting the export. Must not be null.
|
||||
* @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
|
||||
* @throws SolrServerException if there is a problem with exporting the index.
|
||||
* @throws IOException if there is a problem creating the files or communicating with Solr.
|
||||
* @throws SolrImportExportException if there is a problem in communicating with Solr.
|
||||
*/
|
||||
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
|
||||
throws SolrServerException, IOException, SolrImportExportException
|
||||
{
|
||||
if (StringUtils.isBlank(solrUrl))
|
||||
{
|
||||
throw new SolrImportExportException("Could not construct solr URL for index " + indexName + ", aborting export.");
|
||||
}
|
||||
|
||||
if (!toDir.exists() || !toDir.canWrite())
|
||||
{
|
||||
throw new SolrImportExportException("Target directory " + toDir
|
||||
+ " doesn't exist or is not writable, aborting export of index "
|
||||
+ indexName);
|
||||
}
|
||||
|
||||
HttpSolrServer solr = new HttpSolrServer(solrUrl);
|
||||
|
||||
SolrQuery query = new SolrQuery("*:*");
|
||||
if (StringUtils.isNotBlank(fromWhen))
|
||||
{
|
||||
String lastValueFilter = makeFilterQuery(timeField, fromWhen);
|
||||
if (StringUtils.isNotBlank(lastValueFilter))
|
||||
{
|
||||
query.addFilterQuery(lastValueFilter);
|
||||
}
|
||||
}
|
||||
|
||||
query.setRows(0);
|
||||
query.setGetFieldStatistics(timeField);
|
||||
Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
|
||||
if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
|
||||
log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
|
||||
return;
|
||||
}
|
||||
FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
|
||||
if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
|
||||
log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
|
||||
return;
|
||||
}
|
||||
Date earliestTimestamp = (Date) timeFieldInfo.getMin();
|
||||
|
||||
query.setGetFieldStatistics(false);
|
||||
query.clearSorts();
|
||||
query.setRows(0);
|
||||
query.setFacet(true);
|
||||
query.add(FacetParams.FACET_RANGE, timeField);
|
||||
query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
|
||||
query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
|
||||
query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
|
||||
query.setFacetMinCount(1);
|
||||
|
||||
List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();
|
||||
|
||||
for (RangeFacet.Count monthFacet : monthFacets) {
|
||||
Date monthStartDate;
|
||||
String monthStart = monthFacet.getValue();
|
||||
try
|
||||
{
|
||||
monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
|
||||
}
|
||||
catch (java.text.ParseException e)
|
||||
{
|
||||
throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart, e);
|
||||
}
|
||||
int docsThisMonth = monthFacet.getCount();
|
||||
|
||||
SolrQuery monthQuery = new SolrQuery("*:*");
|
||||
monthQuery.setRows(ROWS_PER_FILE);
|
||||
monthQuery.set("wt", "csv");
|
||||
monthQuery.set("fl", "*");
|
||||
|
||||
monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]");
|
||||
|
||||
for (int i = 0; i < docsThisMonth; i+= ROWS_PER_FILE)
|
||||
{
|
||||
monthQuery.setStart(i);
|
||||
URL url = new URL(solrUrl + "/select?" + monthQuery.toString());
|
||||
|
||||
File file = new File(toDir.getCanonicalPath(), makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
|
||||
if (file.createNewFile())
|
||||
{
|
||||
FileUtils.copyURLToFile(url, file);
|
||||
log.info("Exported batch " + i + " to " + file.getCanonicalPath());
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
|
||||
+ " while exporting index " + indexName
|
||||
+ ", month" + monthStart
|
||||
+ ", batch " + i);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
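
A usage sketch for the export entry point above, assuming the statistics core's time field is "time" (as makeTimeField further down suggests); the URL and directory are placeholders.

import java.io.File;

import org.dspace.util.SolrImportExport;

public class ExportSketch
{
    public static void main(String[] args) throws Exception
    {
        // Export the last 7 days of usage events as CSV batches named
        // statistics_export_<month>[_<n>].csv into the target directory.
        SolrImportExport.exportIndex("statistics",
                new File("/dspace/solr-export"),
                "http://localhost:8080/solr/statistics",
                "time",
                "7");
    }
}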
|
||||
|
||||
/**
* Return a filter query that represents the export date range passed in as lastValue
* @param timeField the time field to use for the date range
* @param lastValue the requested date range, see options for acceptable values
* @return a filter query representing the date range, or null if no suitable date range can be created.
*/
private static String makeFilterQuery(String timeField, String lastValue) {
if ("m".equals(lastValue))
{
// export data from the previous month
return timeField + ":[NOW/MONTH-1MONTH TO NOW/MONTH]";
}

int days;
if ("d".equals(lastValue))
{
days = 1;
}
else
{
// other acceptable value: a number, specifying how many days back to export
days = Integer.valueOf(lastValue); // TODO check value?
}
return timeField + ":[NOW/DAY-" + days + "DAYS TO " + SOLR_DATE_FORMAT.format(new Date()) + "]";
}
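
For reference, worked examples of the filter queries the branches above produce (with timeField "time"):

// lastValue "m" -> time:[NOW/MONTH-1MONTH TO NOW/MONTH]
// lastValue "d" -> time:[NOW/DAY-1DAYS TO <now, formatted with SOLR_DATE_FORMAT>]
// lastValue "7" -> time:[NOW/DAY-7DAYS TO <now, formatted with SOLR_DATE_FORMAT>]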
|
||||
|
||||
/**
* Return the specified directory name or fall back to a default value.
*
* @param directoryValue a specific directory name. Optional.
* @return directoryValue if given as a non-blank string. A default directory otherwise.
*/
private static String makeDirectoryName(String directoryValue)
{
if (StringUtils.isNotBlank(directoryValue))
{
return directoryValue;
}
return ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr-export" + File.separator;
}
|
||||
|
||||
/**
* Creates a filename for the export batch.
*
* @param indexName The name of the index being exported.
* @param exportStart The start timestamp of the export
* @param totalRecords The total number of records in the export.
* @param index The index of the current batch.
* @return A file name that is appropriate to use for exporting the batch of data described by the parameters.
*/
private static String makeExportFilename(String indexName, Date exportStart, long totalRecords, int index)
{
String exportFileNumber = "";
if (totalRecords > ROWS_PER_FILE) {
exportFileNumber = StringUtils.leftPad("" + (index / ROWS_PER_FILE), (int) Math.ceil(Math.log10(totalRecords / ROWS_PER_FILE)), "0");
}
return indexName
+ "_export_"
+ EXPORT_DATE_FORMAT.format(exportStart)
+ (StringUtils.isNotBlank(exportFileNumber) ? "_" + exportFileNumber : "")
+ ".csv";
}
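
For reference, example file names this helper produces, assuming EXPORT_DATE_FORMAT (declared earlier in the class, outside this hunk) is a month-level pattern such as yyyy-MM:

// export fitting in one file:        statistics_export_2015-06.csv
// export spanning several batches:   statistics_export_2015-06_0.csv, statistics_export_2015-06_1.csv, ...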
|
||||
|
||||
/**
* Returns the full URL for the specified index name.
*
* @param indexName the index name whose Solr URL is required. If the index name starts with
* "statistics" or is "authority", the Solr base URL will be looked up
* in the corresponding DSpace configuration file. Otherwise, it will fall back to a default.
* @return the full URL to the Solr index, as a String.
*/
private static String makeSolrUrl(String indexName)
{
if (indexName.startsWith("statistics"))
{
// TODO account for year shards properly?
return ConfigurationManager.getProperty("solr-statistics", "server") + indexName.replaceFirst("statistics", "");
}
else if ("authority".equals(indexName))
{
return ConfigurationManager.getProperty("solr.authority.server");
}
return "http://localhost:8080/solr/" + indexName; // TODO better default?
}
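
For reference, how the lookups above resolve (the property values shown are only typical defaults):

// "statistics"      -> the solr-statistics "server" property, e.g. http://localhost:8080/solr/statistics
// "statistics-2014" -> the same property with the shard suffix appended, e.g. .../statistics-2014
// "authority"       -> the solr.authority.server property
// anything else     -> http://localhost:8080/solr/<indexName>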
|
||||
|
||||
/**
* Returns a time field for the specified index name that is suitable for incremental export.
*
* @param indexName the index name whose time field is required.
* @return the name of the time field, or null if no suitable field can be determined.
*/
private static String makeTimeField(String indexName)
{
if (indexName.startsWith("statistics"))
{
return "time";
}
else if ("authority".equals(indexName))
{
return "last_modified_date";
}
return null; // TODO some sort of default?
}
|
||||
|
||||
/**
* A utility method to print out all available command-line options and exit given the specified code.
*
* @param options the supported options.
* @param exitCode the exit code to use. The method will call System#exit(int) with the given code.
*/
private static void printHelpAndExit(Options options, int exitCode)
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp(SolrImportExport.class.getSimpleName() + "\n", options);
System.exit(exitCode);
}
}
|
@@ -0,0 +1,24 @@
|
||||
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;

/**
* @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
*/
public class SolrImportExportException extends Exception
{
public SolrImportExportException(String message)
{
super(message);
}

public SolrImportExportException(String message, Throwable cause)
{
super(message, cause);
}
}
|
@@ -13,7 +13,10 @@ import org.dspace.core.Context;
|
||||
import org.dspace.storage.bitstore.BitstreamStorageManager;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -46,13 +49,32 @@ public abstract class AbstractVersionProvider {
|
||||
for(Bundle nativeBundle : nativeItem.getBundles())
|
||||
{
|
||||
Bundle bundleNew = itemNew.createBundle(nativeBundle.getName());
|
||||
|
||||
// DSpace knows several types of resource policies (see the class
// org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
// and Inherited. Submission, Workflow and Inherited policies will be
// set automatically as necessary. We need to copy the custom policies
// only, to preserve manually set policies and embargoes (which are
// realized by custom policies with a start date).
|
||||
List<ResourcePolicy> bundlePolicies =
|
||||
AuthorizeManager.findPoliciesByDSOAndType(c, nativeBundle, ResourcePolicy.TYPE_CUSTOM);
|
||||
AuthorizeManager.addPolicies(c, bundlePolicies, bundleNew);
|
||||
|
||||
for(Bitstream nativeBitstream : nativeBundle.getBitstreams())
|
||||
{
|
||||
|
||||
Bitstream bitstreamNew = createBitstream(c, nativeBitstream);
|
||||
|
||||
bundleNew.addBitstream(bitstreamNew);
|
||||
|
||||
// NOTE: bundle.addBitstream() causes Bundle policies to be inherited by default.
|
||||
// So, we need to REMOVE any inherited TYPE_CUSTOM policies before copying over the correct ones.
|
||||
AuthorizeManager.removeAllPoliciesByDSOAndType(c, bitstreamNew, ResourcePolicy.TYPE_CUSTOM);
|
||||
|
||||
// Now, we need to copy the TYPE_CUSTOM resource policies from old bitstream
|
||||
// to the new bitstream, like we did above for bundles
|
||||
List<ResourcePolicy> bitstreamPolicies =
|
||||
AuthorizeManager.findPoliciesByDSOAndType(c, nativeBitstream, ResourcePolicy.TYPE_CUSTOM);
|
||||
AuthorizeManager.addPolicies(c, bitstreamPolicies, bitstreamNew);
|
||||
|
||||
if(nativeBundle.getPrimaryBitstreamID() == nativeBitstream.getID())
|
||||
{
|
||||
bundleNew.setPrimaryBitstreamID(bitstreamNew.getID());
|
||||
|
@@ -17,6 +17,9 @@ import org.dspace.utils.DSpace;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -84,6 +87,15 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
|
||||
} catch (IdentifierException e) {
|
||||
throw new RuntimeException("Can't create Identifier!");
|
||||
}
|
||||
// DSpace knows several types of resource policies (see the class
// org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
// and Inherited. Submission, Workflow and Inherited policies will be
// set automatically as necessary. We need to copy the custom policies
// only, to preserve manually set policies and embargoes (which are
// realized by custom policies with a start date).
|
||||
List<ResourcePolicy> policies =
|
||||
AuthorizeManager.findPoliciesByDSOAndType(c, previousItem, ResourcePolicy.TYPE_CUSTOM);
|
||||
AuthorizeManager.addPolicies(c, policies, itemNew);
|
||||
itemNew.update();
|
||||
return itemNew;
|
||||
}catch (SQLException e) {
|
||||
|
@@ -210,7 +210,7 @@ public class WorkflowManager
|
||||
{
|
||||
ArrayList<WorkflowItem> mylist = new ArrayList<WorkflowItem>();
|
||||
|
||||
String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? ";
|
||||
String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? ORDER BY workflow_id";
|
||||
|
||||
TableRowIterator tri = DatabaseManager.queryTable(c,
|
||||
"workflowitem", myquery,e.getID());
|
||||
@@ -246,7 +246,7 @@ public class WorkflowManager
|
||||
|
||||
String myquery = "SELECT workflowitem.* FROM workflowitem, TaskListItem" +
|
||||
" WHERE tasklistitem.eperson_id= ? " +
|
||||
" AND tasklistitem.workflow_id=workflowitem.workflow_id";
|
||||
" AND tasklistitem.workflow_id=workflowitem.workflow_id ORDER BY workflowitem.workflow_id";
|
||||
|
||||
TableRowIterator tri = DatabaseManager
|
||||
.queryTable(c, "workflowitem", myquery, e.getID());
|
||||
|
@@ -289,6 +289,8 @@ jsp.dspace-admin.general.eperson = EPerson
|
||||
jsp.dspace-admin.general.group = Group
|
||||
jsp.dspace-admin.general.group-colon = Group:
|
||||
jsp.dspace-admin.general.next.button = Next >
|
||||
jsp.dspace-admin.general.policy-end-date-colon = End Date:
|
||||
jsp.dspace-admin.general.policy-start-date-colon = Start Date:
|
||||
jsp.dspace-admin.general.remove = Remove
|
||||
jsp.dspace-admin.general.save = Save
|
||||
jsp.dspace-admin.general.update = Update
|
||||
@@ -1000,15 +1002,15 @@ jsp.submit.complete.info = Your submissio
|
||||
jsp.submit.complete.again = Submit another item to the same collection
|
||||
jsp.submit.complete.link = Go to My DSpace
|
||||
jsp.submit.complete.title = Submission Complete!
|
||||
jsp.submit.creative-commons.choice1 = Press the 'Next' button below to <em>keep</em> the license previously chosen.
|
||||
jsp.submit.creative-commons.choice2 = Press the 'Skip Creative Commons' button below to <em>remove</em> the current choice, and forego a Creative Commons license.
|
||||
jsp.submit.creative-commons.choice3 = Complete the selection process below to <em>replace</em> the current choice.
|
||||
jsp.submit.creative-commons.heading = Submit: Use a Creative Commons License
|
||||
jsp.submit.creative-commons.info1 = You have already chosen a Creative Commons license and added it to this item. You may:
|
||||
jsp.submit.creative-commons.info2 = To license your Item under Creative Commons, follow the instructions below. You will be given an opportunity to review your selection. Follow the 'proceed' link to add the license. If you wish to omit a Creative Commons license, press the 'Skip Creative Commons' button.
|
||||
jsp.submit.creative-commons.info3 = Your browser must support IFrames to use this feature
|
||||
jsp.submit.creative-commons.skip.button = Skip Creative Commons >
|
||||
jsp.submit.creative-commons.info1 = If you wish, you may add a <a href="http://creativecommons.org/">Creative Commons</a> License to your item. <strong>Creative Commons licenses govern what people who read your work may then do with it.</strong>
|
||||
jsp.submit.creative-commons.title = Use a Creative Commons License
|
||||
|
||||
jsp.submit.creative-commons.license = License Type
|
||||
jsp.submit.creative-commons.select_change = Select or modify your license ...
|
||||
jsp.submit.creative-commons.no_license = No Creative Commons License
|
||||
jsp.submit.creative-commons.license.current = Current license
|
||||
|
||||
jsp.submit.edit-bitstream-access.title = Edit Bitstream Access
|
||||
jsp.submit.edit-bitstream-access.heading = Edit Bitstream Access
|
||||
jsp.submit.edit-bitstream-access.save.button = Save
|
||||
|
@@ -9,40 +9,42 @@
|
||||
|
||||
-->
|
||||
<xsl:stylesheet version="1.1"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:old-cc="http://web.resource.org/cc/"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
exclude-result-prefixes="old-cc">
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:old-cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
exclude-result-prefixes="old-cc">
|
||||
|
||||
<xsl:output method="xml" indent="yes"/>
|
||||
|
||||
<!-- process incoming RDF, copy everything add our own statements for cc:Work -->
|
||||
<xsl:template match="/rdf:RDF">
|
||||
<rdf:RDF>
|
||||
<xsl:copy-of select="@*"/>
|
||||
<xsl:apply-templates select="cc:License"/>
|
||||
</rdf:RDF>
|
||||
</xsl:template>
|
||||
<xsl:output method="xml" indent="yes" />
|
||||
|
||||
<!-- handle License element -->
|
||||
<xsl:template match="cc:License">
|
||||
<cc:Work rdf:about="">
|
||||
<cc:license rdf:resource="{@rdf:about}"/>
|
||||
</cc:Work>
|
||||
<cc:License>
|
||||
<xsl:copy-of select="@*"/>
|
||||
<xsl:apply-templates select="node()"/>
|
||||
</cc:License>
|
||||
</xsl:template>
|
||||
|
||||
<!--
|
||||
Identity transform
|
||||
-->
|
||||
<xsl:template match="node()|@*">
|
||||
<xsl:copy>
|
||||
<xsl:apply-templates select="node()|@*"/>
|
||||
</xsl:copy>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:template match="/">
|
||||
<xsl:apply-templates select="result/rdf/rdf:RDF" />
|
||||
</xsl:template>
|
||||
|
||||
<!-- process incoming RDF, copy everything add our own statements for cc:Work -->
|
||||
<xsl:template match="result/rdf/rdf:RDF">
|
||||
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:cc="http://creativecommons.org/ns#">
|
||||
<xsl:copy-of select="@*" />
|
||||
<xsl:apply-templates select="cc:License" />
|
||||
</rdf:RDF>
|
||||
</xsl:template>
|
||||
|
||||
<!-- handle License element -->
|
||||
<xsl:template match="cc:License">
|
||||
<cc:Work rdf:about="">
|
||||
<cc:license rdf:resource="{@rdf:about}" />
|
||||
</cc:Work>
|
||||
<cc:License>
|
||||
<xsl:copy-of select="@*" />
|
||||
<xsl:apply-templates select="node()" />
|
||||
</cc:License>
|
||||
</xsl:template>
|
||||
|
||||
<!-- Identity transform -->
|
||||
<xsl:template match="node()|@*">
|
||||
<xsl:copy>
|
||||
<xsl:apply-templates select="node()|@*" />
|
||||
</xsl:copy>
|
||||
</xsl:template>
|
||||
|
||||
</xsl:stylesheet>
|
@@ -0,0 +1,24 @@
|
||||
--
|
||||
-- The contents of this file are subject to the license and copyright
|
||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
||||
-- tree and available online at
|
||||
--
|
||||
-- http://www.dspace.org/license/
|
||||
--
|
||||
|
||||
------------------------------------------------------
|
||||
-- DS-3097 introduced new action id for WITHDRAWN_READ
|
||||
------------------------------------------------------
|
||||
|
||||
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in (
|
||||
SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
|
||||
LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
|
||||
LEFT JOIN item ON item2bundle.item_id = item.item_id
|
||||
WHERE item.withdrawn = 1
|
||||
);
|
||||
|
||||
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in (
|
||||
SELECT item2bundle.bundle_id FROM item2bundle
|
||||
LEFT JOIN item ON item2bundle.item_id = item.item_id
|
||||
WHERE item.withdrawn = 1
|
||||
);
|
@@ -0,0 +1,24 @@
|
||||
--
|
||||
-- The contents of this file are subject to the license and copyright
|
||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
||||
-- tree and available online at
|
||||
--
|
||||
-- http://www.dspace.org/license/
|
||||
--
|
||||
|
||||
------------------------------------------------------
|
||||
-- DS-3097 introduced new action id for WITHDRAWN_READ
|
||||
------------------------------------------------------
|
||||
|
||||
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in (
|
||||
SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
|
||||
LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
|
||||
LEFT JOIN item ON item2bundle.item_id = item.item_id
|
||||
WHERE item.withdrawn = 1
|
||||
);
|
||||
|
||||
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in (
|
||||
SELECT item2bundle.bundle_id FROM item2bundle
|
||||
LEFT JOIN item ON item2bundle.item_id = item.item_id
|
||||
WHERE item.withdrawn = 1
|
||||
);
|
@@ -0,0 +1,24 @@
|
||||
--
|
||||
-- The contents of this file are subject to the license and copyright
|
||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
||||
-- tree and available online at
|
||||
--
|
||||
-- http://www.dspace.org/license/
|
||||
--
|
||||
|
||||
------------------------------------------------------
|
||||
-- DS-3097 introduced new action id for WITHDRAWN_READ
|
||||
------------------------------------------------------
|
||||
|
||||
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in (
|
||||
SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
|
||||
LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
|
||||
LEFT JOIN item ON item2bundle.item_id = item.item_id
|
||||
WHERE item.withdrawn = true
|
||||
);
|
||||
|
||||
UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in (
|
||||
SELECT item2bundle.bundle_id FROM item2bundle
|
||||
LEFT JOIN item ON item2bundle.item_id = item.item_id
|
||||
WHERE item.withdrawn = true
|
||||
);
|
@@ -410,7 +410,10 @@ public class DCDateTest extends AbstractUnitTest
|
||||
@Test
|
||||
public void testGetCurrent()
|
||||
{
|
||||
assertTrue("testGetCurrent 0", DateUtils.isSameDay(DCDate.getCurrent().toDate(), new Date()));
|
||||
Calendar calendar = Calendar.getInstance();
|
||||
calendar.setTimeInMillis(System.currentTimeMillis());
|
||||
calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
|
||||
assertTrue("testGetCurrent 0", DateUtils.isSameDay(DCDate.getCurrent().toDate(), calendar.getTime()));
|
||||
}
|
||||
|
||||
|
||||
|
@@ -15,6 +15,12 @@ import org.dspace.core.Context;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.List;
|
||||
import java.util.Calendar;
|
||||
import java.util.TimeZone;
|
||||
|
||||
import org.dspace.AbstractUnitTest;
|
||||
import org.apache.log4j.Logger;
|
||||
@@ -23,7 +29,6 @@ import static org.junit.Assert.* ;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import org.junit.rules.ExpectedException;
|
||||
|
||||
|
||||
/**
|
||||
* Unit Tests for class InstallItem
|
||||
* @author pvillega
|
||||
@@ -193,10 +198,13 @@ public class InstallItemTest extends AbstractUnitTest
|
||||
is.getItem().addMetadata("dc", "date", "issued", Item.ANY, "2011-01-01");
|
||||
|
||||
//get current date
|
||||
DCDate now = DCDate.getCurrent();
|
||||
String dayAndTime = now.toString();
|
||||
//parse out just the date, remove the time (format: yyyy-mm-ddT00:00:00Z)
|
||||
String date = dayAndTime.substring(0, dayAndTime.indexOf("T"));
|
||||
Calendar calendar = Calendar.getInstance();
|
||||
calendar.setTimeInMillis(System.currentTimeMillis());
|
||||
calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
|
||||
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||
|
||||
String date = sdf.format(calendar.getTime());
|
||||
|
||||
Item result = InstallItem.installItem(context, is, handle);
|
||||
context.restoreAuthSystemState();
|
||||
@@ -245,10 +253,11 @@ public class InstallItemTest extends AbstractUnitTest
|
||||
is.getItem().addMetadata("dc", "date", "issued", Item.ANY, "2011-01-01");
|
||||
|
||||
//get current date
|
||||
DCDate now = DCDate.getCurrent();
|
||||
String dayAndTime = now.toString();
|
||||
//parse out just the date, remove the time (format: yyyy-mm-ddT00:00:00Z)
|
||||
String date = dayAndTime.substring(0, dayAndTime.indexOf("T"));
|
||||
Calendar calendar = Calendar.getInstance();
|
||||
calendar.setTimeInMillis(System.currentTimeMillis());
|
||||
calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
|
||||
String date = sdf.format(calendar.getTime());
|
||||
|
||||
Item result = InstallItem.restoreItem(context, is, handle);
|
||||
context.restoreAuthSystemState();
|
||||
@@ -259,4 +268,4 @@ public class InstallItemTest extends AbstractUnitTest
|
||||
assertThat("testRestoreItem_todayAsIssuedDate 0", issuedDates[0].value, equalTo(date));
|
||||
assertThat("testRestoreItem_todayAsIssuedDate 1", issuedDates[1].value, equalTo("2011-01-01"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -9,6 +9,7 @@ package org.dspace.content;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.sql.SQLException;
|
||||
|
||||
@@ -22,6 +23,7 @@ import java.util.List;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
import org.junit.*;
|
||||
import static org.junit.Assert.* ;
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
@@ -1517,7 +1519,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of canEditBoolean method, of class Collection.
|
||||
* Test of canEdit method, of class Item.
|
||||
*/
|
||||
@Test
|
||||
public void testCanEditBooleanAuth() throws Exception
|
||||
@@ -1542,7 +1544,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of canEditBoolean method, of class Collection.
|
||||
* Test of canEdit method, of class Item.
|
||||
*/
|
||||
@Test
|
||||
public void testCanEditBooleanAuth2() throws Exception
|
||||
@@ -1567,7 +1569,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of canEditBoolean method, of class Collection.
|
||||
* Test of canEdit method, of class Item.
|
||||
*/
|
||||
@Test
|
||||
public void testCanEditBooleanAuth3() throws Exception
|
||||
@@ -1594,7 +1596,7 @@ public class ItemTest extends AbstractDSpaceObjectTest
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of canEditBoolean method, of class Collection.
|
||||
* Test of canEdit method, of class Item.
|
||||
*/
|
||||
@Test
|
||||
public void testCanEditBooleanAuth4() throws Exception
|
||||
@@ -1616,11 +1618,34 @@ public class ItemTest extends AbstractDSpaceObjectTest
|
||||
}};
|
||||
|
||||
// Ensure person with WRITE perms on the Collection can edit item
|
||||
assertTrue("testCanEditBooleanAuth3 0", it.canEdit());
|
||||
assertTrue("testCanEditBooleanAuth4 0", it.canEdit());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of canEditBoolean method, of class Collection.
|
||||
* Test of canEdit method, of class Item.
|
||||
*/
|
||||
@Test
|
||||
public void testCanEditBooleanAuth5() throws Exception
|
||||
{
|
||||
// Test Inheritance of permissions
|
||||
new NonStrictExpectations(AuthorizeManager.class)
|
||||
{{
|
||||
// Disallow Item WRITE perms
|
||||
AuthorizeManager.authorizeAction((Context) any, (Item) any,
|
||||
Constants.WRITE); result = new AuthorizeException();
|
||||
// Allow Collection WRITE perms
|
||||
AuthorizeManager.authorizeAction((Context) any, (Collection) any,
|
||||
Constants.WRITE,anyBoolean); result = null;
|
||||
}};
|
||||
|
||||
Collection c = Collection.create(context);
|
||||
c.createTemplateItem();
|
||||
c.update();
|
||||
assertTrue("testCanEditBooleanNoAuth5 0", c.getTemplateItem().canEdit());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of canEdit method, of class Item.
|
||||
*/
|
||||
@Test
|
||||
public void testCanEditBooleanNoAuth() throws Exception
|
||||
@@ -1649,6 +1674,87 @@ public class ItemTest extends AbstractDSpaceObjectTest
|
||||
assertFalse("testCanEditBooleanNoAuth 0", it.canEdit());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of canEdit method, of class Item.
|
||||
*/
|
||||
@Test
|
||||
public void testCanEditBooleanNoAuth2() throws Exception
|
||||
{
|
||||
// Test Inheritance of permissions
|
||||
new NonStrictExpectations(AuthorizeManager.class)
|
||||
{{
|
||||
// Disallow Item WRITE perms
|
||||
AuthorizeManager.authorizeAction((Context) any, (Item) any,
|
||||
Constants.WRITE); result = new AuthorizeException();
|
||||
// Disallow parent Community WRITE and ADD perms
|
||||
AuthorizeManager.authorizeAction((Context) any, (Community) any,
|
||||
Constants.WRITE,anyBoolean); result = new AuthorizeException();
|
||||
AuthorizeManager.authorizeAction((Context) any, (Community) any,
|
||||
Constants.ADD,anyBoolean); result = new AuthorizeException();
|
||||
// Allow parent Collection ADD perms
|
||||
AuthorizeManager.authorizeAction((Context) any, (Collection) any,
|
||||
Constants.ADD,anyBoolean); result = null;
|
||||
}};
|
||||
|
||||
Collection c = Collection.create(context);
|
||||
WorkspaceItem wi = WorkspaceItem.create(context, c, true);
|
||||
assertFalse("testCanEditBooleanNoAuth2 0", wi.getItem().canEdit());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of isInProgressSubmission method, of class Item.
|
||||
* @throws AuthorizeException
|
||||
* @throws SQLException
|
||||
* @throws IOException
|
||||
*
|
||||
*/
|
||||
@Test
|
||||
public void testIsInProgressSubmission() throws SQLException, AuthorizeException, IOException
|
||||
{
|
||||
context.turnOffAuthorisationSystem();
|
||||
Collection c = Collection.create(context);
|
||||
WorkspaceItem wi = WorkspaceItem.create(context, c, true);
|
||||
context.restoreAuthSystemState();
|
||||
assertTrue("testIsInProgressSubmission 0", wi.getItem().isInProgressSubmission());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of isInProgressSubmission method, of class Item.
|
||||
* @throws AuthorizeException
|
||||
* @throws SQLException
|
||||
* @throws IOException
|
||||
*
|
||||
*/
|
||||
@Test
|
||||
public void testIsInProgressSubmissionFalse() throws SQLException, AuthorizeException, IOException
|
||||
{
|
||||
context.turnOffAuthorisationSystem();
|
||||
Collection c = Collection.create(context);
|
||||
WorkspaceItem wi = WorkspaceItem.create(context, c, true);
|
||||
Item item = InstallItem.installItem(context, wi);
|
||||
context.restoreAuthSystemState();
|
||||
assertFalse("testIsInProgressSubmissionFalse 0", item.isInProgressSubmission());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of isInProgressSubmission method, of class Item.
|
||||
* @throws AuthorizeException
|
||||
* @throws SQLException
|
||||
* @throws IOException
|
||||
*
|
||||
*/
|
||||
@Test
|
||||
public void testIsInProgressSubmissionFalse2() throws SQLException, AuthorizeException, IOException
|
||||
{
|
||||
context.turnOffAuthorisationSystem();
|
||||
Collection c = Collection.create(context);
|
||||
c.createTemplateItem();
|
||||
c.update();
|
||||
Item item = c.getTemplateItem();
|
||||
context.restoreAuthSystemState();
|
||||
assertFalse("testIsInProgressSubmissionFalse2 0", item.isInProgressSubmission());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of getName method, of class Item.
|
||||
*/
|
||||
|
@@ -270,13 +270,37 @@ public class WorkspaceItemTest extends AbstractUnitTest
|
||||
* Test of update method, of class WorkspaceItem.
|
||||
*/
|
||||
@Test
|
||||
public void testUpdate() throws Exception
|
||||
public void testUpdateAuth() throws Exception
|
||||
{
|
||||
//TODO: how can we verify it works?
|
||||
wi.update();
|
||||
System.out.println("update");
|
||||
// no need to mock the authorization as we are the same user that
// created the wi
|
||||
boolean pBefore = wi.isPublishedBefore();
|
||||
wi.setPublishedBefore(!pBefore);
|
||||
wi.update();
|
||||
context.removeCached(wi, wi.getID());
|
||||
wi = WorkspaceItem.find(context, wi.getID());
|
||||
assertTrue("testUpdate", pBefore != wi.isPublishedBefore());
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of update method, of class WorkspaceItem with no WRITE auth.
|
||||
*/
|
||||
@Test(expected=AuthorizeException.class)
|
||||
public void testUpdateNoAuth() throws Exception
|
||||
{
|
||||
new NonStrictExpectations(AuthorizeManager.class)
|
||||
{{
|
||||
// Remove Item WRITE perms
|
||||
AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
|
||||
Constants.WRITE); result = false;
|
||||
AuthorizeManager.authorizeAction((Context) any, (Item) any,
|
||||
Constants.WRITE); result = new AuthorizeException();
|
||||
}};
|
||||
boolean pBefore = wi.isPublishedBefore();
|
||||
wi.setPublishedBefore(!pBefore);
|
||||
wi.update();
|
||||
fail("Exception expected");
|
||||
}
|
||||
/**
|
||||
* Test of deleteAll method, of class WorkspaceItem.
|
||||
*/
|
||||
|
@@ -11,12 +11,15 @@ import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import mockit.NonStrictExpectations;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.AbstractUnitTest;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Collection;
|
||||
@@ -32,6 +35,7 @@ import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.MockConfigurationManager;
|
||||
import org.dspace.core.PluginManager;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.handle.HandleManager;
|
||||
import static org.hamcrest.CoreMatchers.notNullValue;
|
||||
import static org.hamcrest.CoreMatchers.nullValue;
|
||||
@@ -39,9 +43,9 @@ import org.junit.AfterClass;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.fail;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
|
||||
@@ -65,11 +69,15 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
private static String testItemHandle = null;
|
||||
private static String testMappedItemHandle = null;
|
||||
|
||||
/** Create a temporary folder which will be cleaned up automatically by JUnit.
|
||||
NOTE: As a ClassRule, this temp folder is shared by ALL tests below.
|
||||
Its AIP contents are initialized in init() below. **/
|
||||
/** Create a global temporary upload folder which will be cleaned up automatically by JUnit.
|
||||
NOTE: As a ClassRule, this temp folder is shared by ALL tests below. **/
|
||||
@ClassRule
|
||||
public static final TemporaryFolder testFolder = new TemporaryFolder();
|
||||
public static final TemporaryFolder uploadTempFolder = new TemporaryFolder();
|
||||
|
||||
/** Create another temporary folder for AIPs. As a Rule, this one is *recreated* for each
|
||||
test, in order to ensure each test is standalone with respect to AIPs. **/
|
||||
@Rule
|
||||
public final TemporaryFolder aipTempFolder = new TemporaryFolder();
|
||||
|
||||
/**
|
||||
* This method will be run during class initialization. It will initialize
|
||||
@@ -80,14 +88,14 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
*/
|
||||
@BeforeClass
|
||||
public static void setUpClass()
|
||||
{
|
||||
{
|
||||
// Initialize MockConfigurationManager, and tell it to load properties by default
|
||||
new MockConfigurationManager(true);
|
||||
|
||||
// Override default value of configured temp directory to point at our
|
||||
// JUnit TemporaryFolder. This ensures Crosswalk classes like RoleCrosswalk
|
||||
// store their temp files in a place where JUnit can clean them up automatically.
|
||||
MockConfigurationManager.setProperty("upload.temp.dir", testFolder.getRoot().getAbsolutePath());
|
||||
MockConfigurationManager.setProperty("upload.temp.dir", uploadTempFolder.getRoot().getAbsolutePath());
|
||||
|
||||
try
|
||||
{
|
||||
@@ -215,50 +223,6 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an initial set of AIPs for the test content generated in setUpClass() above.
|
||||
*/
|
||||
@Before
|
||||
@Override
|
||||
public void init()
|
||||
{
|
||||
// call init() from AbstractUnitTest to initialize testing framework
|
||||
super.init();
|
||||
|
||||
try
|
||||
{
|
||||
// Locate the top level community (from our test data)
|
||||
Community topCommunity = (Community) HandleManager.resolveToObject(context, topCommunityHandle);
|
||||
|
||||
log.info("init() - CREATE TEST AIPS");
|
||||
// NOTE: This will not overwrite the AIPs if they already exist.
|
||||
// But, it does ensure they are created PRIOR to running any of the below tests.
|
||||
// (So, essentially, this runs ONCE...after that, it'll be ignored since AIPs already exist)
|
||||
// While ideally, you don't want to share data between tests, generating AIPs is VERY timeconsuming.
|
||||
createAIP(topCommunity, null, true, false);
|
||||
}
|
||||
catch(PackageException|CrosswalkException ex)
|
||||
{
|
||||
log.error("Packaging Error in init()", ex);
|
||||
fail("Packaging Error in init(): " + ex.getMessage());
|
||||
}
|
||||
catch (AuthorizeException ex)
|
||||
{
|
||||
log.error("Authorization Error in init()", ex);
|
||||
fail("Authorization Error in init(): " + ex.getMessage());
|
||||
}
|
||||
catch (IOException ex)
|
||||
{
|
||||
log.error("IO Error in init()", ex);
|
||||
fail("IO Error in init(): " + ex.getMessage());
|
||||
}
|
||||
catch (SQLException ex)
|
||||
{
|
||||
log.error("SQL Error in init()", ex);
|
||||
fail("SQL Error in init(): " + ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test restoration from AIP of entire Community Hierarchy
|
||||
*/
|
||||
@@ -284,9 +248,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
HashMap<String,String> infoMap = new HashMap<String,String>();
|
||||
saveObjectInfo(topCommunity, infoMap);
|
||||
|
||||
// Ensure community & child AIPs are exported (but don't overwrite)
|
||||
// Export community & child AIPs
|
||||
log.info("testRestoreCommunityHierarchy() - CREATE AIPs");
|
||||
File aipFile = createAIP(topCommunity, null, true, false);
|
||||
File aipFile = createAIP(topCommunity, null, true);
|
||||
|
||||
// Delete everything from parent community on down
|
||||
log.info("testRestoreCommunityHierarchy() - DELETE Community Hierarchy");
|
||||
@@ -319,6 +283,87 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
log.info("testRestoreCommunityHierarchy() - END");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test restoration from AIP of an access restricted Community
|
||||
*/
|
||||
@Test
|
||||
public void testRestoreRestrictedCommunity() throws Exception
|
||||
{
|
||||
new NonStrictExpectations(AuthorizeManager.class)
|
||||
{{
|
||||
// Allow Full Admin permissions. Since we are working with an object
|
||||
// hierarchy (Items/Bundles/Bitstreams) you need full admin rights
|
||||
AuthorizeManager.isAdmin((Context) any); result = true;
|
||||
}};
|
||||
|
||||
log.info("testRestoreRestrictedCommunity() - BEGIN");
|
||||
|
||||
// Locate the top-level Community (as a parent)
|
||||
Community parent = (Community) HandleManager.resolveToObject(context, topCommunityHandle);
|
||||
|
||||
// Create a brand new (empty) Community to test with
|
||||
Community community = parent.createSubcommunity();
|
||||
community.addMetadata("dc", "title", null, null, "Restricted Community");
|
||||
community.update();
|
||||
String communityHandle = community.getHandle();
|
||||
|
||||
// Create a new Group to access restrict to
|
||||
Group group = Group.create(context);
|
||||
group.setName("Special Users");
|
||||
group.update();
|
||||
|
||||
// Create a custom resource policy for this community
|
||||
List<ResourcePolicy> policies = new ArrayList<>();
|
||||
ResourcePolicy admin_policy = ResourcePolicy.create(context);
|
||||
admin_policy.setRpName("Admin Read-Only");
|
||||
Group adminGroup = Group.find(context, Group.ADMIN_ID);
|
||||
admin_policy.setGroup(adminGroup);
|
||||
admin_policy.setAction(Constants.READ);
|
||||
policies.add(admin_policy);
|
||||
|
||||
// Replace default community policies with this new one
|
||||
AuthorizeManager.removeAllPolicies(context, community);
|
||||
AuthorizeManager.addPolicies(context, policies, community);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Export collection AIP
|
||||
log.info("testRestoreRestrictedCommunity() - CREATE Community AIP");
|
||||
File aipFile = createAIP(community, null, false);
|
||||
|
||||
// Now, delete that Community
|
||||
log.info("testRestoreRestrictedCommunity() - DELETE Community");
|
||||
parent.removeSubcommunity(community);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Assert the deleted Community no longer exists
|
||||
DSpaceObject obj = HandleManager.resolveToObject(context, communityHandle);
|
||||
assertThat("testRestoreRestrictedCommunity() Community " + communityHandle + " doesn't exist", obj, nullValue());
|
||||
|
||||
// Restore Community from AIP (non-recursive)
|
||||
log.info("testRestoreRestrictedCommunity() - RESTORE Community");
|
||||
restoreFromAIP(parent, aipFile, null, false);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Assert the deleted Community is RESTORED
|
||||
DSpaceObject objRestored = HandleManager.resolveToObject(context, communityHandle);
|
||||
assertThat("testRestoreRestrictedCommunity() Community " + communityHandle + " exists", objRestored, notNullValue());
|
||||
|
||||
// Assert the number of restored policies is equal
|
||||
List<ResourcePolicy> policiesRestored = AuthorizeManager.getPolicies(context, objRestored);
|
||||
assertEquals("testRestoreRestrictedCommunity() restored policy count equal", policies.size(), policiesRestored.size());
|
||||
|
||||
// Assert the restored policy has same name, group and permission settings
|
||||
ResourcePolicy restoredPolicy = policiesRestored.get(0);
|
||||
assertEquals("testRestoreRestrictedCommunity() restored policy group successfully", admin_policy.getGroup().getName(), restoredPolicy.getGroup().getName());
|
||||
assertEquals("testRestoreRestrictedCommunity() restored policy action successfully", admin_policy.getAction(), restoredPolicy.getAction());
|
||||
assertEquals("testRestoreRestrictedCommunity() restored policy name successfully", admin_policy.getRpName(), restoredPolicy.getRpName());
|
||||
|
||||
log.info("testRestoreRestrictedCommunity() - END");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test replacement from AIP of entire Community Hierarchy
|
||||
*/
|
||||
@@ -340,9 +385,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
// Get the count of collections under our Community or any Sub-Communities
|
||||
int numberOfCollections = topCommunity.getAllCollections().length;
|
||||
|
||||
// Ensure community & child AIPs are exported (but don't overwrite)
|
||||
// Export community & child AIPs
|
||||
log.info("testReplaceCommunityHierarchy() - CREATE AIPs");
|
||||
File aipFile = createAIP(topCommunity, null, true, false);
|
||||
File aipFile = createAIP(topCommunity, null, true);
|
||||
|
||||
// Get some basic info about Collection to be deleted
|
||||
// In this scenario, we'll delete the test "Grandchild Collection"
|
||||
@@ -417,9 +462,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
// Get its current name / title
|
||||
String oldName = topCommunity.getName();
|
||||
|
||||
// Ensure only community AIP is exported (but don't overwrite)
|
||||
// Export only community AIP
|
||||
log.info("testReplaceCommunityOnly() - CREATE Community AIP");
|
||||
File aipFile = createAIP(topCommunity, null, false, false);
|
||||
File aipFile = createAIP(topCommunity, null, false);
|
||||
|
||||
// Change the Community name
|
||||
String newName = "This is NOT my Community name!";
|
||||
@@ -465,9 +510,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
HashMap<String,String> infoMap = new HashMap<String,String>();
|
||||
saveObjectInfo(testCollection, infoMap);
|
||||
|
||||
// Ensure collection & child AIPs are exported (but don't overwrite)
|
||||
// Export collection & child AIPs
|
||||
log.info("testRestoreCollectionHierarchy() - CREATE AIPs");
|
||||
File aipFile = createAIP(testCollection, null, true, false);
|
||||
File aipFile = createAIP(testCollection, null, true);
|
||||
|
||||
// Delete everything from collection on down
|
||||
log.info("testRestoreCollectionHierarchy() - DELETE Collection Hierarchy");
|
||||
@@ -490,6 +535,87 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
log.info("testRestoreCollectionHierarchy() - END");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test restoration from AIP of an access restricted Collection
|
||||
*/
|
||||
@Test
|
||||
public void testRestoreRestrictedCollection() throws Exception
|
||||
{
|
||||
new NonStrictExpectations(AuthorizeManager.class)
|
||||
{{
|
||||
// Allow Full Admin permissions. Since we are working with an object
|
||||
// hierarchy (Items/Bundles/Bitstreams) you need full admin rights
|
||||
AuthorizeManager.isAdmin((Context) any); result = true;
|
||||
}};
|
||||
|
||||
log.info("testRestoreRestrictedCollection() - BEGIN");
|
||||
|
||||
// Locate the top-level Community (as a parent)
|
||||
Community parent = (Community) HandleManager.resolveToObject(context, topCommunityHandle);
|
||||
|
||||
// Create a brand new (empty) Collection to test with
|
||||
Collection collection = parent.createCollection();
|
||||
collection.addMetadata("dc", "title", null, null, "Restricted Collection");
|
||||
collection.update();
|
||||
String collectionHandle = collection.getHandle();
|
||||
|
||||
// Create a new Group to access restrict to
|
||||
Group group = Group.create(context);
|
||||
group.setName("Special Users");
|
||||
group.update();
|
||||
|
||||
// Create a custom resource policy for this collection
|
||||
List<ResourcePolicy> policies = new ArrayList<>();
|
||||
ResourcePolicy admin_policy = ResourcePolicy.create(context);
|
||||
admin_policy.setRpName("Admin Read-Only");
|
||||
Group adminGroup = Group.find(context, Group.ADMIN_ID);
|
||||
admin_policy.setGroup(adminGroup);
|
||||
admin_policy.setAction(Constants.READ);
|
||||
policies.add(admin_policy);
|
||||
|
||||
// Replace default collection policies with this new one
|
||||
AuthorizeManager.removeAllPolicies(context, collection);
|
||||
AuthorizeManager.addPolicies(context, policies, collection);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Export collection AIP
|
||||
log.info("testRestoreRestrictedCollection() - CREATE Collection AIP");
|
||||
File aipFile = createAIP(collection, null, false);
|
||||
|
||||
// Now, delete that Collection
|
||||
log.info("testRestoreRestrictedCollection() - DELETE Collection");
|
||||
parent.removeCollection(collection);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Assert the deleted collection no longer exists
|
||||
DSpaceObject obj = HandleManager.resolveToObject(context, collectionHandle);
|
||||
assertThat("testRestoreRestrictedCollection() Collection " + collectionHandle + " doesn't exist", obj, nullValue());
|
||||
|
||||
// Restore Collection from AIP (non-recursive)
|
||||
log.info("testRestoreRestrictedCollection() - RESTORE Collection");
|
||||
restoreFromAIP(parent, aipFile, null, false);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Assert the deleted Collection is RESTORED
|
||||
DSpaceObject objRestored = HandleManager.resolveToObject(context, collectionHandle);
|
||||
assertThat("testRestoreRestrictedCollection() Collection " + collectionHandle + " exists", objRestored, notNullValue());
|
||||
|
||||
// Assert the number of restored policies is equal
|
||||
List<ResourcePolicy> policiesRestored = AuthorizeManager.getPolicies(context, objRestored);
|
||||
assertEquals("testRestoreRestrictedCollection() restored policy count equal", policies.size(), policiesRestored.size());
|
||||
|
||||
// Assert the restored policy has same name, group and permission settings
|
||||
ResourcePolicy restoredPolicy = policiesRestored.get(0);
|
||||
assertEquals("testRestoreRestrictedCollection() restored policy group successfully", admin_policy.getGroup().getName(), restoredPolicy.getGroup().getName());
|
||||
assertEquals("testRestoreRestrictedCollection() restored policy action successfully", admin_policy.getAction(), restoredPolicy.getAction());
|
||||
assertEquals("testRestoreRestrictedCollection() restored policy name successfully", admin_policy.getRpName(), restoredPolicy.getRpName());
|
||||
|
||||
log.info("testRestoreRestrictedCollection() - END");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test replacement from AIP of entire Collection (with Items)
|
||||
*/
|
||||
@@ -511,9 +637,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
// How many items are in this Collection?
|
||||
int numberOfItems = testCollection.countItems();
|
||||
|
||||
// Ensure collection & child AIPs are exported (but don't overwrite)
|
||||
// Export collection & child AIPs
|
||||
log.info("testReplaceCollectionHierarchy() - CREATE AIPs");
|
||||
File aipFile = createAIP(testCollection, null, true, false);
|
||||
File aipFile = createAIP(testCollection, null, true);
|
||||
|
||||
// Get some basic info about Item to be deleted
|
||||
// In this scenario, we'll delete the test "Grandchild Collection Item #1"
|
||||
@@ -572,9 +698,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
// Get its current name / title
|
||||
String oldName = testCollection.getName();
|
||||
|
||||
// Ensure only collection AIP is exported (but don't overwrite)
|
||||
// Export only collection AIP
|
||||
log.info("testReplaceCollectionOnly() - CREATE Collection AIP");
|
||||
File aipFile = createAIP(testCollection, null, false, false);
|
||||
File aipFile = createAIP(testCollection, null, false);
|
||||
|
||||
// Change the Collection name
|
||||
String newName = "This is NOT my Collection name!";
|
||||
@@ -635,9 +761,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
if(bitstreamCount<=0)
|
||||
fail("No test bitstream found for Item in testRestoreItem()!");
|
||||
|
||||
// Ensure item AIP is exported (but don't overwrite)
|
||||
// Export item AIP
|
||||
log.info("testRestoreItem() - CREATE Item AIP");
|
||||
File aipFile = createAIP(testItem, null, false, false);
|
||||
File aipFile = createAIP(testItem, null, false);
|
||||
|
||||
// Get parent, so we can restore under the same parent
|
||||
Collection parent = (Collection) testItem.getParentObject();
|
||||
@@ -670,6 +796,149 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
log.info("testRestoreItem() - END");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test restoration from AIP of an access restricted Item
|
||||
*/
|
||||
@Test
|
||||
public void testRestoreRestrictedItem() throws Exception
|
||||
{
|
||||
new NonStrictExpectations(AuthorizeManager.class)
|
||||
{{
|
||||
// Allow Full Admin permissions. Since we are working with an object
|
||||
// hierarchy (Items/Bundles/Bitstreams) you need full admin rights
|
||||
AuthorizeManager.isAdmin((Context) any); result = true;
|
||||
}};
|
||||
|
||||
log.info("testRestoreRestrictedItem() - BEGIN");
|
||||
|
||||
// Locate the test Collection (as a parent)
|
||||
Collection parent = (Collection) HandleManager.resolveToObject(context, testCollectionHandle);
|
||||
|
||||
// Create a brand new Item to test with (since we will be changing policies)
|
||||
WorkspaceItem wsItem = WorkspaceItem.create(context, parent, false);
|
||||
Item item = InstallItem.installItem(context, wsItem);
|
||||
item.addMetadata("dc", "title", null, null, "Test Restricted Item");
|
||||
// Create a test Bitstream in the ORIGINAL bundle
|
||||
File f = new File(testProps.get("test.bitstream").toString());
|
||||
Bitstream b = item.createSingleBitstream(new FileInputStream(f));
|
||||
b.setName("Test Bitstream");
|
||||
b.update();
|
||||
item.update();
|
||||
|
||||
// Create a custom resource policy for this Item
|
||||
List<ResourcePolicy> policies = new ArrayList<>();
|
||||
ResourcePolicy admin_policy = ResourcePolicy.create(context);
|
||||
admin_policy.setRpName("Admin Read-Only");
|
||||
Group adminGroup = Group.find(context, Group.ADMIN_ID);
|
||||
admin_policy.setGroup(adminGroup);
|
||||
admin_policy.setAction(Constants.READ);
|
||||
policies.add(admin_policy);
|
||||
item.replaceAllItemPolicies(policies);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Export item AIP
|
||||
log.info("testRestoreRestrictedItem() - CREATE Item AIP");
|
||||
File aipFile = createAIP(item, null, false);
|
||||
|
||||
// Get item handle, so we can check that it is later restored properly
|
||||
String itemHandle = item.getHandle();
|
||||
|
||||
// Now, delete that item
|
||||
log.info("testRestoreRestrictedItem() - DELETE Item");
|
||||
parent.removeItem(item);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Assert the deleted item no longer exists
|
||||
DSpaceObject obj = HandleManager.resolveToObject(context, itemHandle);
|
||||
assertThat("testRestoreRestrictedItem() item " + itemHandle + " doesn't exist", obj, nullValue());
|
||||
|
||||
// Restore Item from AIP (non-recursive)
|
||||
log.info("testRestoreRestrictedItem() - RESTORE Item");
|
||||
restoreFromAIP(parent, aipFile, null, false);
|
||||
// Commit these changes to our DB
|
||||
context.commit();
|
||||
|
||||
// Assert the deleted item is RESTORED
|
||||
DSpaceObject objRestored = HandleManager.resolveToObject(context, itemHandle);
|
||||
assertThat("testRestoreRestrictedItem() item " + itemHandle + " exists", objRestored, notNullValue());
|
||||
|
||||
// Assert the number of restored policies is equal
|
||||
List<ResourcePolicy> policiesRestored = AuthorizeManager.getPolicies(context, objRestored);
|
||||
assertEquals("testRestoreRestrictedItem() restored policy count equal", policies.size(), policiesRestored.size());
|
||||
|
||||
// Assert the restored policy has same name, group and permission settings
|
||||
ResourcePolicy restoredPolicy = policiesRestored.get(0);
|
||||
assertEquals("testRestoreRestrictedItem() restored policy group successfully", admin_policy.getGroup().getName(), restoredPolicy.getGroup().getName());
|
||||
assertEquals("testRestoreRestrictedItem() restored policy action successfully", admin_policy.getAction(), restoredPolicy.getAction());
|
||||
assertEquals("testRestoreRestrictedItem() restored policy name successfully", admin_policy.getRpName(), restoredPolicy.getRpName());
|
||||
|
||||
log.info("testRestoreRestrictedItem() - END");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test restoration from AIP of an Item that has no access policies associated with it.
|
||||
*/
|
||||
@Test
|
||||
public void testRestoreItemNoPolicies() throws Exception
|
||||
{
|
||||
new NonStrictExpectations(AuthorizeManager.class)
|
||||
{{
|
||||
// Allow Full Admin permissions. Since we are working with an object
|
||||
// hierarchy (Items/Bundles/Bitstreams) you need full admin rights
|
||||
AuthorizeManager.isAdmin((Context) any); result = true;
|
||||
}};
|
||||
|
||||
log.info("testRestoreItemNoPolicies() - BEGIN");
|
||||
|
||||
// Locate the test Collection (as a parent)
|
||||
Collection parent = (Collection) HandleManager.resolveToObject(context, testCollectionHandle);
|
||||
|
||||
// Create a brand new Item to test with (since we will be changing policies)
|
||||
WorkspaceItem wsItem = WorkspaceItem.create(context, parent, false);
|
||||
Item item = InstallItem.installItem(context, wsItem);
|
||||
item.addMetadata("dc", "title", null, null, "Test No Policies Item");
|
||||
// Create a test Bitstream in the ORIGINAL bundle
|
||||
File f = new File(testProps.get("test.bitstream").toString());
|
||||
Bitstream b = item.createSingleBitstream(new FileInputStream(f));
|
||||
b.setName("Test Bitstream");
|
||||
b.update();
|
||||
item.update();
|
||||
|
||||
// Remove all existing policies from the Item
|
||||
AuthorizeManager.removeAllPolicies(context, item);
|
||||
|
||||
// Export item AIP
|
||||
log.info("testRestoreItemNoPolicies() - CREATE Item AIP");
|
||||
File aipFile = createAIP(item, null, false);
|
||||
|
||||
// Get item handle, so we can check that it is later restored properly
|
||||
String itemHandle = item.getHandle();
|
||||
|
||||
// Now, delete that item
|
||||
log.info("testRestoreItemNoPolicies() - DELETE Item");
|
||||
parent.removeItem(item);
|
||||
|
||||
// Assert the deleted item no longer exists
|
||||
DSpaceObject obj = HandleManager.resolveToObject(context, itemHandle);
|
||||
assertThat("testRestoreItemNoPolicies() item " + itemHandle + " doesn't exist", obj, nullValue());
|
||||
|
||||
// Restore Item from AIP (non-recursive)
|
||||
log.info("testRestoreItemNoPolicies() - RESTORE Item");
|
||||
restoreFromAIP(parent, aipFile, null, false);
|
||||
|
||||
// Assert the deleted item is RESTORED
|
||||
DSpaceObject objRestored = HandleManager.resolveToObject(context, itemHandle);
|
||||
assertThat("testRestoreItemNoPolicies() item " + itemHandle + " exists", objRestored, notNullValue());
|
||||
|
||||
// Assert the restored item also has ZERO policies
|
||||
List<ResourcePolicy> policiesRestored = AuthorizeManager.getPolicies(context, objRestored);
|
||||
assertEquals("testRestoreItemNoPolicies() restored policy count is zero", 0, policiesRestored.size());
|
||||
|
||||
log.info("testRestoreItemNoPolicies() - END");
|
||||
}
|
||||
|
||||
/**
|
||||
* Test replacement from AIP of an Item object
|
||||
*/
|
||||
@@ -691,9 +960,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
// Get its current name / title
|
||||
String oldName = testItem.getName();
|
||||
|
||||
// Ensure item AIP is exported (but don't overwrite)
|
||||
// Export item AIP
|
||||
log.info("testReplaceItem() - CREATE Item AIP");
|
||||
File aipFile = createAIP(testItem, null, false, false);
|
||||
File aipFile = createAIP(testItem, null, false);
|
||||
|
||||
// Change the Item name
|
||||
String newName = "This is NOT my Item name!";
|
||||
@@ -739,9 +1008,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
Collection[] mappedCollections = item.getCollections();
|
||||
assertEquals("testRestoreMappedItem() item " + testMappedItemHandle + " is mapped to multiple collections", 2, mappedCollections.length);
|
||||
|
||||
// Ensure mapped item AIP is exported (but don't overwrite)
|
||||
// Export mapped item AIP
|
||||
log.info("testRestoreMappedItem() - CREATE Mapped Item AIP");
|
||||
File aipFile = createAIP(item, null, false, false);
|
||||
File aipFile = createAIP(item, null, false);
|
||||
|
||||
// Now, delete that item (must be removed from BOTH collections to delete it)
|
||||
log.info("testRestoreMappedItem() - DELETE Item");
|
||||
@@ -778,10 +1047,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
* @param dso DSpaceObject to create AIP(s) for
|
||||
* @param pkParams any special PackageParameters to pass (if any)
|
||||
* @param recursive whether to recursively create AIPs or just a single AIP
|
||||
* @param overwrite whether to overwrite the local AIP file if it is found
|
||||
* @return exported root AIP file
|
||||
*/
|
||||
private File createAIP(DSpaceObject dso, PackageParameters pkgParams, boolean recursive, boolean overwrite)
|
||||
private File createAIP(DSpaceObject dso, PackageParameters pkgParams, boolean recursive)
|
||||
throws PackageException, CrosswalkException, AuthorizeException, SQLException, IOException
|
||||
{
|
||||
// Get a reference to the configured "AIP" package disseminator
|
||||
@@ -790,14 +1058,13 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
if (dip == null)
|
||||
{
|
||||
fail("Could not find a disseminator for type 'AIP'");
|
||||
return null;
|
||||
}
|
||||
|
||||
// Export file (this is placed in JUnit's temporary folder, so that it can be cleaned up after tests complete)
|
||||
File exportAIPFile = new File(testFolder.getRoot().getAbsolutePath() + File.separator + PackageUtils.getPackageName(dso, "zip"));
|
||||
|
||||
// To save time, we'll skip re-exporting AIPs, unless overwrite == true
|
||||
if(!exportAIPFile.exists() || overwrite)
|
||||
else
|
||||
{
|
||||
// Export file (this is placed in JUnit's temporary folder, so that it can be cleaned up after tests complete)
|
||||
File exportAIPFile = new File(aipTempFolder.getRoot().getAbsolutePath() + File.separator + PackageUtils.getPackageName(dso, "zip"));
|
||||
|
||||
// If unspecified, set default PackageParameters
|
||||
if (pkgParams==null)
|
||||
pkgParams = new PackageParameters();
|
||||
@@ -807,9 +1074,9 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
dip.disseminateAll(context, dso, pkgParams, exportAIPFile);
|
||||
else
|
||||
dip.disseminate(context, dso, pkgParams, exportAIPFile);
|
||||
|
||||
return exportAIPFile;
|
||||
}
|
||||
|
||||
return exportAIPFile;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -830,24 +1097,26 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
{
|
||||
fail("Could not find a ingestor for type 'AIP'");
|
||||
}
|
||||
|
||||
if(!aipFile.exists())
|
||||
{
|
||||
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
|
||||
}
|
||||
|
||||
// If unspecified, set default PackageParameters
|
||||
if(pkgParams==null)
|
||||
pkgParams = new PackageParameters();
|
||||
|
||||
// Ensure restore mode is enabled
|
||||
pkgParams.setRestoreModeEnabled(true);
|
||||
|
||||
// Actually ingest the object(s) from AIPs
|
||||
if(recursive)
|
||||
sip.ingestAll(context, parent, aipFile, pkgParams, null);
|
||||
else
|
||||
sip.ingest(context, parent, aipFile, pkgParams, null);
|
||||
{
|
||||
if(!aipFile.exists())
|
||||
{
|
||||
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
|
||||
}
|
||||
|
||||
// If unspecified, set default PackageParameters
|
||||
if(pkgParams==null)
|
||||
pkgParams = new PackageParameters();
|
||||
|
||||
// Ensure restore mode is enabled
|
||||
pkgParams.setRestoreModeEnabled(true);
|
||||
|
||||
// Actually ingest the object(s) from AIPs
|
||||
if(recursive)
|
||||
sip.ingestAll(context, parent, aipFile, pkgParams, null);
|
||||
else
|
||||
sip.ingest(context, parent, aipFile, pkgParams, null);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -868,24 +1137,26 @@ public class DSpaceAIPIntegrationTest extends AbstractUnitTest
|
||||
{
|
||||
fail("Could not find a ingestor for type 'AIP'");
|
||||
}
|
||||
|
||||
if(!aipFile.exists())
|
||||
{
|
||||
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
|
||||
}
|
||||
|
||||
// If unspecified, set default PackageParameters
|
||||
if (pkgParams==null)
|
||||
pkgParams = new PackageParameters();
|
||||
|
||||
// Ensure restore mode is enabled
|
||||
pkgParams.setRestoreModeEnabled(true);
|
||||
|
||||
// Actually replace the object(s) from AIPs
|
||||
if(recursive)
|
||||
sip.replaceAll(context, dso, aipFile, pkgParams);
|
||||
else
|
||||
sip.replace(context, dso, aipFile, pkgParams);
|
||||
{
|
||||
if(!aipFile.exists())
|
||||
{
|
||||
fail("AIP Package File does NOT exist: " + aipFile.getAbsolutePath());
|
||||
}
|
||||
|
||||
// If unspecified, set default PackageParameters
|
||||
if (pkgParams==null)
|
||||
pkgParams = new PackageParameters();
|
||||
|
||||
// Ensure restore mode is enabled
|
||||
pkgParams.setRestoreModeEnabled(true);
|
||||
|
||||
// Actually replace the object(s) from AIPs
|
||||
if(recursive)
|
||||
sip.replaceAll(context, dso, aipFile, pkgParams);
|
||||
else
|
||||
sip.replace(context, dso, aipFile, pkgParams);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -568,7 +568,7 @@ public class DOIIdentifierProviderTest
|
||||
assumeNotNull(doiRow);
|
||||
|
||||
assertTrue("Reservation of DOI did not set the corret DOI status.",
|
||||
DOIIdentifierProvider.TO_BE_RESERVERED.intValue() == doiRow.getIntColumn("status"));
|
||||
DOIIdentifierProvider.TO_BE_RESERVED.intValue() == doiRow.getIntColumn("status"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@@ -10,16 +10,22 @@ package org.dspace.identifier;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.UUID;
|
||||
import org.dspace.AbstractUnitTest;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.*;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.identifier.ezid.DateToYear;
|
||||
import org.dspace.identifier.ezid.Transform;
|
||||
import org.dspace.kernel.ServiceManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.workflow.WorkflowManager;
|
||||
import org.junit.*;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
@@ -29,10 +35,22 @@ import static org.junit.Assert.*;
|
||||
public class EZIDIdentifierProviderTest
|
||||
extends AbstractUnitTest
|
||||
{
|
||||
/** Name of the reserved EZID test authority */
|
||||
/** Name of the reserved EZID test authority. */
|
||||
private static final String TEST_SHOULDER = "10.5072/FK2";
|
||||
|
||||
private static ServiceManager sm = null;
|
||||
/** A sensible metadata crosswalk. */
|
||||
private static final Map<String, String> aCrosswalk = new HashMap<>();
|
||||
static {
|
||||
aCrosswalk.put("datacite.creator", "dc.contributor.author");
|
||||
aCrosswalk.put("datacite.title", "dc.title");
|
||||
aCrosswalk.put("datacite.publisher", "dc.publisher");
|
||||
aCrosswalk.put("datacite.publicationyear", "dc.date.issued");
|
||||
}
|
||||
/** A sensible set of metadata transforms. */
|
||||
private static final Map<String, Transform> crosswalkTransforms = new HashMap();
|
||||
static {
|
||||
crosswalkTransforms.put("datacite.publicationyear", new DateToYear());
|
||||
}
|
||||
|
||||
private static ConfigurationService config = null;
|
||||
|
||||
@@ -49,6 +67,9 @@ public class EZIDIdentifierProviderTest
|
||||
|
||||
private static void dumpMetadata(Item eyetem)
|
||||
{
|
||||
if (null == eyetem)
|
||||
return;
|
||||
|
||||
Metadatum[] metadata = eyetem.getMetadata("dc", Item.ANY, Item.ANY, Item.ANY);
|
||||
for (Metadatum metadatum : metadata)
|
||||
System.out.printf("Metadata: %s.%s.%s(%s) = %s\n",
|
||||
@@ -89,30 +110,11 @@ public class EZIDIdentifierProviderTest
|
||||
return item;
|
||||
}
|
||||
|
||||
/*
|
||||
@BeforeClass
|
||||
public static void setUpClass()
|
||||
throws Exception
|
||||
{
|
||||
Context ctx = new Context();
|
||||
ctx.turnOffAuthorisationSystem();
|
||||
|
||||
ctx.setCurrentUser(eperson);
|
||||
|
||||
// Create an environment for our test objects to live in.
|
||||
community = Community.create(null, ctx);
|
||||
community.setMetadata("name", "A Test Community");
|
||||
community.update();
|
||||
|
||||
collection = community.createCollection();
|
||||
collection.setMetadata("name", "A Test Collection");
|
||||
collection.update();
|
||||
|
||||
ctx.complete();
|
||||
|
||||
// Find the usual kernel services
|
||||
sm = kernelImpl.getServiceManager();
|
||||
|
||||
config = kernelImpl.getConfigurationService();
|
||||
|
||||
// Configure the service under test.
|
||||
@@ -129,71 +131,67 @@ public class EZIDIdentifierProviderTest
|
||||
throws Exception
|
||||
{
|
||||
System.out.print("Tearing down\n\n");
|
||||
Context ctx = new Context();
|
||||
dumpMetadata(Item.find(ctx, itemID));
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp()
|
||||
throws Exception
|
||||
{
|
||||
context.setCurrentUser(eperson);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
// Create an environment for our test objects to live in.
|
||||
community = Community.create(null, context);
|
||||
community.setMetadata("name", "A Test Community");
|
||||
community.update();
|
||||
|
||||
collection = community.createCollection();
|
||||
collection.setMetadata("name", "A Test Collection");
|
||||
collection.update();
|
||||
|
||||
context.commit();
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown()
|
||||
throws SQLException
|
||||
{
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
*/
|
||||
|
||||
/** Dummy test. */
|
||||
@Test
|
||||
public void testNothing()
|
||||
{
|
||||
System.out.println("dummy");
|
||||
dumpMetadata(Item.find(context, itemID));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test of supports method, of class DataCiteIdentifierProvider.
|
||||
*/
|
||||
/*
|
||||
@Test
|
||||
public void testSupports_Class()
|
||||
{
|
||||
System.out.println("supports Class");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
Class<? extends Identifier> identifier = DOI.class;
|
||||
boolean result = instance.supports(identifier);
|
||||
assertTrue("DOI should be supported", result);
|
||||
assertTrue("DOI is supported", result);
|
||||
}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Test of supports method, of class DataCiteIdentifierProvider.
|
||||
*/
|
||||
/*
|
||||
@Test
|
||||
public void testSupports_String()
|
||||
{
|
||||
System.out.println("supports String");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
String identifier = "doi:" + TEST_SHOULDER;
|
||||
boolean result = instance.supports(identifier);
|
||||
assertTrue(identifier + " should be supported", result);
|
||||
assertTrue(identifier + " is supported", result);
|
||||
}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Test of register method, of class DataCiteIdentifierProvider.
|
||||
* Test of register method, of class EZIDIdentifierProvider.
|
||||
*/
|
||||
/*
|
||||
@Test
|
||||
@@ -202,9 +200,7 @@ public class EZIDIdentifierProviderTest
|
||||
{
|
||||
System.out.println("register Context, DSpaceObject");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
DSpaceObject dso = newItem(context);
|
||||
|
||||
@@ -224,9 +220,7 @@ public class EZIDIdentifierProviderTest
|
||||
{
|
||||
System.out.println("register 3");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
DSpaceObject object = newItem(context);
|
||||
|
||||
@@ -246,9 +240,7 @@ public class EZIDIdentifierProviderTest
|
||||
{
|
||||
System.out.println("reserve");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
DSpaceObject dso = newItem(context);
|
||||
String identifier = UUID.randomUUID().toString();
|
||||
@@ -266,13 +258,11 @@ public class EZIDIdentifierProviderTest
|
||||
{
|
||||
System.out.println("mint");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
DSpaceObject dso = newItem(context);
|
||||
String result = instance.mint(context, dso);
|
||||
assertNotNull("Null returned", result);
|
||||
assertNotNull("Non-null returned", result);
|
||||
}
|
||||
*/
|
||||
|
||||
@@ -286,9 +276,7 @@ public class EZIDIdentifierProviderTest
|
||||
{
|
||||
System.out.println("resolve");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
String identifier = UUID.randomUUID().toString();
|
||||
DSpaceObject expResult = newItem(context);
|
||||
@@ -310,9 +298,7 @@ public class EZIDIdentifierProviderTest
|
||||
{
|
||||
System.out.println("lookup");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
String identifier = UUID.randomUUID().toString();
|
||||
DSpaceObject object = newItem(context);
|
||||
@@ -333,9 +319,7 @@ public class EZIDIdentifierProviderTest
|
||||
{
|
||||
System.out.println("delete 2");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
DSpaceObject dso = newItem(context);
|
||||
|
||||
@@ -370,18 +354,16 @@ public class EZIDIdentifierProviderTest
|
||||
*/
|
||||
|
||||
/**
|
||||
* Test of delete method, of class DataCiteIdentifierProvider.
|
||||
* Test of delete method, of class EZIDIdentifierProvider.
|
||||
*/
|
||||
/*
|
||||
@Test()
|
||||
@Test
|
||||
public void testDelete_3args()
|
||||
throws Exception
|
||||
{
|
||||
System.out.println("delete 3");
|
||||
|
||||
EZIDIdentifierProvider instance
|
||||
= (EZIDIdentifierProvider)
|
||||
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
|
||||
DSpaceObject dso = newItem(context);
|
||||
String identifier = UUID.randomUUID().toString();
|
||||
@@ -400,4 +382,45 @@ public class EZIDIdentifierProviderTest
|
||||
assertFalse("Test identifier is still present", found.hasNext());
|
||||
}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Test of crosswalkMetadata method, of class EZIDIdentifierProvider.
|
||||
* @throws Exception
|
||||
*/
|
||||
@Test
|
||||
public void testCrosswalkMetadata()
|
||||
throws Exception
|
||||
{
|
||||
System.out.println("crosswalkMetadata");
|
||||
|
||||
// Set up the instance to be tested
|
||||
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
|
||||
instance.setConfigurationService(config);
|
||||
instance.setCrosswalk(aCrosswalk);
|
||||
instance.setCrosswalkTransform(crosswalkTransforms);
|
||||
|
||||
// Let's have a fresh Item to work with
|
||||
DSpaceObject dso = newItem(context);
|
||||
String handle = dso.getHandle();
|
||||
|
||||
// Test!
|
||||
Map<String, String> metadata = instance.crosswalkMetadata(dso);
|
||||
|
||||
// Evaluate
|
||||
String target = (String) metadata.get("_target");
|
||||
assertEquals("Generates correct _target metadatum",
|
||||
config.getProperty("dspace.url") + "/handle/" + handle,
|
||||
target);
|
||||
assertTrue("Has title", metadata.containsKey("datacite.title"));
|
||||
assertTrue("Has publication year", metadata.containsKey("datacite.publicationyear"));
|
||||
assertTrue("Has publisher", metadata.containsKey("datacite.publisher"));
|
||||
assertTrue("Has creator", metadata.containsKey("datacite.creator"));
|
||||
|
||||
// Dump out the generated metadata for inspection
|
||||
System.out.println("Results:");
|
||||
for (Entry metadatum : metadata.entrySet())
|
||||
{
|
||||
System.out.printf(" %s : %s\n", metadatum.getKey(), metadatum.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -12,7 +12,6 @@ import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.*;
|
||||
|
||||
import org.dspace.AbstractUnitTest;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
@@ -24,20 +23,26 @@ import org.junit.runners.Parameterized;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
* Drive the MultiFormatDateParser from a table of test formats and sample data
|
||||
* using JUnit's Parameterized runner.
|
||||
*
|
||||
* @author mhwood
|
||||
*/
|
||||
@RunWith(Parameterized.class)
|
||||
public class MultiFormatDateParserTest
|
||||
extends AbstractUnitTest
|
||||
{
|
||||
private String testMessage;
|
||||
private String toParseDate;
|
||||
private String expectedFormat;
|
||||
private boolean expectedResult;
|
||||
private static Locale vmLocale;
|
||||
private final String testMessage;
|
||||
private final String toParseDate;
|
||||
private final String expectedFormat;
|
||||
private final boolean expectedResult;
|
||||
|
||||
|
||||
public MultiFormatDateParserTest(String testMessage, String toParseDate, String expectedFormat, boolean expectedResult)
|
||||
/**
|
||||
* Test a single date format.
|
||||
* JUnit will instantiate this class repeatedly with data from {@link #dateFormatsToTest}.
|
||||
*/
|
||||
public MultiFormatDateParserTest(String testMessage, String toParseDate,
|
||||
String expectedFormat, boolean expectedResult)
|
||||
{
|
||||
this.testMessage = testMessage;
|
||||
this.toParseDate = toParseDate;
|
||||
@@ -45,6 +50,7 @@ public class MultiFormatDateParserTest
|
||||
this.expectedResult = expectedResult;
|
||||
}
|
||||
|
||||
/** Date formats and samples to drive the parameterized test. */
|
||||
@Parameterized.Parameters
|
||||
public static Collection dateFormatsToTest() {
|
||||
return Arrays.asList(new Object[][]{
|
||||
@@ -78,11 +84,49 @@ public class MultiFormatDateParserTest
|
||||
@BeforeClass
|
||||
public static void setUpClass()
|
||||
{
|
||||
// store default locale of the environment
|
||||
vmLocale = Locale.getDefault();
|
||||
// set default locale to English just for the test of this class
|
||||
Locale.setDefault(Locale.ENGLISH);
|
||||
Map<String, String> formats = new HashMap<>(32);
|
||||
formats.put("\\d{8}" ,"yyyyMMdd");
|
||||
formats.put("\\d{1,2}-\\d{1,2}-\\d{4}", "dd-MM-yyyy");
|
||||
formats.put("\\d{4}-\\d{1,2}-\\d{1,2}", "yyyy-MM-dd");
|
||||
formats.put("\\d{4}-\\d{1,2}", "yyyy-MM");
|
||||
formats.put("\\d{1,2}/\\d{1,2}/\\d{4}", "MM/dd/yyyy");
|
||||
formats.put("\\d{4}/\\d{1,2}/\\d{1,2}", "yyyy/MM/dd");
|
||||
formats.put("\\d{1,2}\\s[a-z]{3}\\s\\d{4}", "dd MMM yyyy");
|
||||
formats.put("\\d{1,2}\\s[a-z]{4,}\\s\\d{4}", "dd MMMM yyyy");
|
||||
formats.put("\\d{12}", "yyyyMMddHHmm");
|
||||
formats.put("\\d{8}\\s\\d{4}", "yyyyMMdd HHmm");
|
||||
formats.put("\\d{1,2}-\\d{1,2}-\\d{4}\\s\\d{1,2}:\\d{2}", "dd-MM-yyyy HH:mm");
|
||||
formats.put("\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{2}", "yyyy-MM-dd HH:mm");
|
||||
formats.put("\\d{1,2}/\\d{1,2}/\\d{4}\\s\\d{1,2}:\\d{2}", "MM/dd/yyyy HH:mm");
|
||||
formats.put("\\d{4}/\\d{1,2}/\\d{1,2}\\s\\d{1,2}:\\d{2}", "yyyy/MM/dd HH:mm");
|
||||
formats.put("\\d{1,2}\\s[a-z]{3}\\s\\d{4}\\s\\d{1,2}:\\d{2}", "dd MMM yyyy HH:mm");
|
||||
formats.put("\\d{1,2}\\s[a-z]{4,}\\s\\d{4}\\s\\d{1,2}:\\d{2}", "dd MMMM yyyy HH:mm");
|
||||
formats.put("\\d{4}\\s[a-z]{3}\\s\\d{1,2}", "yyyy MMM dd");
|
||||
formats.put("\\d{14}", "yyyyMMddHHmmss");
|
||||
formats.put("\\d{6}", "yyyyMM");
|
||||
formats.put("\\d{4}", "yyyy");
|
||||
formats.put("\\d{8}\\s\\d{6}", "yyyyMMdd HHmmss");
|
||||
formats.put("\\d{1,2}-\\d{1,2}-\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}", "dd-MM-yyyy HH:mm:ss");
|
||||
formats.put("\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}", "yyyy-MM-dd HH:mm:ss");
|
||||
formats.put("\\d{1,2}/\\d{1,2}/\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}", "MM/dd/yyyy HH:mm:ss");
|
||||
formats.put("\\d{4}/\\d{1,2}/\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}", "yyyy/MM/dd HH:mm:ss");
|
||||
formats.put("\\d{1,2}\\s[a-z]{3}\\s\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}", "dd MMM yyyy HH:mm:ss");
|
||||
formats.put("\\d{1,2}\\s[a-z]{4,}\\s\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}", "dd MMMM yyyy HH:mm:ss");
|
||||
formats.put("\\d{4}-\\d{1,2}-\\d{1,2}T\\d{1,2}:\\d{2}:\\d{2}Z", "yyyy-MM-dd'T'HH:mm:ss'Z'");
|
||||
formats.put("\\d{4}-\\d{1,2}-\\d{1,2}T\\d{1,2}:\\d{2}:\\d{2}\\.\\d{3}Z", "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
|
||||
|
||||
new MultiFormatDateParser().setPatterns(formats);
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass()
|
||||
{
|
||||
// restore locale
|
||||
Locale.setDefault(vmLocale);
|
||||
}
|
||||
|
||||
@Before
|
||||
@@ -102,6 +146,7 @@ public class MultiFormatDateParserTest
|
||||
public void testParse() throws ParseException
|
||||
{
|
||||
SimpleDateFormat simpleDateFormat = new SimpleDateFormat(expectedFormat);
|
||||
simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
|
||||
Date result = MultiFormatDateParser.parse(toParseDate);
|
||||
assertEquals(testMessage, expectedResult, simpleDateFormat.parse(toParseDate).equals(result));
|
||||
}
|
||||
|
@@ -13,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>5.0</version>
|
||||
<version>5.6</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@@ -225,6 +225,8 @@ public class DiscoverUtility
|
||||
String query = request.getParameter("query");
|
||||
if (StringUtils.isNotBlank(query))
|
||||
{
|
||||
// Escape any special characters in this user-entered query
|
||||
query = escapeQueryChars(query);
|
||||
queryArgs.setQuery(query);
|
||||
}
|
||||
|
||||
@@ -267,6 +269,19 @@ public class DiscoverUtility
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape colon-space sequence in a user-entered query, based on the
|
||||
* underlying search service. This is intended to let end users paste in a
|
||||
* title containing colon-space without requiring them to escape the colon.
|
||||
*
|
||||
* @param query user-entered query string
|
||||
* @return query with colon in colon-space sequence escaped
|
||||
*/
|
||||
private static String escapeQueryChars(String query)
|
||||
{
|
||||
return StringUtils.replace(query, ": ", "\\: ");
|
||||
}
|
||||
|
||||
private static void setPagination(HttpServletRequest request,
|
||||
DiscoverQuery queryArgs,
|
||||
DiscoveryConfiguration discoveryConfiguration)
|
||||
|
@@ -0,0 +1,74 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.webui.json;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.license.CCLicenseField;
|
||||
import org.dspace.license.CCLookup;
|
||||
|
||||
import com.google.gson.Gson;
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonObject;
|
||||
|
||||
/**
|
||||
* @author Luigi Andrea Pascarelli
|
||||
*/
|
||||
public class CreativeCommonsJSONRequest extends JSONRequest {
|
||||
|
||||
private static Logger log = Logger.getLogger(CreativeCommonsJSONRequest.class);
|
||||
|
||||
@Override
|
||||
public void doJSONRequest(Context context, HttpServletRequest req, HttpServletResponse resp)
|
||||
throws AuthorizeException, IOException {
|
||||
Gson json = new Gson();
|
||||
String selectedLicense = req.getParameter("license");
|
||||
|
||||
List<CCLicenseField> dto = new ArrayList<CCLicenseField>();
|
||||
|
||||
if (StringUtils.isNotBlank(selectedLicense)) {
|
||||
CCLookup cclookup = new CCLookup();
|
||||
|
||||
String ccLocale = ConfigurationManager.getProperty("cc.license.locale");
|
||||
/** Default locale to 'en' */
|
||||
ccLocale = (StringUtils.isNotBlank(ccLocale)) ? ccLocale : "en";
|
||||
|
||||
// output the license fields chooser for the license class type
|
||||
if (cclookup.getLicenseFields(selectedLicense, ccLocale) == null) {
|
||||
// do nothing
|
||||
} else {
|
||||
Collection<CCLicenseField> outerIterator = cclookup.getLicenseFields(selectedLicense, ccLocale);
|
||||
for(CCLicenseField cclicensefield : outerIterator) {
|
||||
if (cclicensefield.getId().equals("jurisdiction"))
|
||||
continue;
|
||||
dto.add(cclicensefield);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
JsonElement tree = json.toJsonTree(dto);
|
||||
JsonObject jo = new JsonObject();
|
||||
jo.add("result", tree);
|
||||
resp.getWriter().write(jo.toString());
|
||||
}
|
||||
|
||||
}
|
@@ -982,11 +982,13 @@ public class ItemTag extends TagSupport
|
||||
|
||||
if (tb != null)
|
||||
{
|
||||
String myPath = request.getContextPath()
|
||||
+ "/retrieve/"
|
||||
+ tb.getID()
|
||||
+ "/"
|
||||
+ UIUtil.encodeBitstreamName(tb
|
||||
if (AuthorizeManager.authorizeActionBoolean(context, tb, Constants.READ))
|
||||
{
|
||||
String myPath = request.getContextPath()
|
||||
+ "/retrieve/"
|
||||
+ tb.getID()
|
||||
+ "/"
|
||||
+ UIUtil.encodeBitstreamName(tb
|
||||
.getName(),
|
||||
Constants.DEFAULT_ENCODING);
|
||||
|
||||
@@ -995,6 +997,7 @@ public class ItemTag extends TagSupport
|
||||
out.print("<img src=\"" + myPath + "\" ");
|
||||
out.print("alt=\"" + tAltText
|
||||
+ "\" /></a><br />");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -63,30 +63,7 @@ public class SelectCollectionTag extends TagSupport
|
||||
{
|
||||
HttpServletRequest hrq = (HttpServletRequest) pageContext.getRequest();
|
||||
Context context = UIUtil.obtainContext(hrq);
|
||||
Map<Community, List<Collection>> commCollList = new LinkedHashMap<Community, List<Collection>>();
|
||||
|
||||
for (Community topcommunity : Community.findAllTop(context))
|
||||
{
|
||||
for (Collection collection : topcommunity.getCollections())
|
||||
{
|
||||
List<Collection> clist = null;
|
||||
if (commCollList.containsKey(topcommunity))
|
||||
{
|
||||
clist = commCollList.get(topcommunity);
|
||||
}
|
||||
else
|
||||
{
|
||||
clist = new ArrayList<Collection>();
|
||||
}
|
||||
clist.add(collection);
|
||||
commCollList.put(topcommunity, clist);
|
||||
}
|
||||
|
||||
for (Community subcommunity : topcommunity.getSubcommunities())
|
||||
{
|
||||
addCommCollList(subcommunity, commCollList);
|
||||
}
|
||||
}
|
||||
Collection[] collections = (Collection[]) hrq.getAttribute("collections");
|
||||
|
||||
sb.append("<select");
|
||||
if (name != null)
|
||||
@@ -109,22 +86,16 @@ public class SelectCollectionTag extends TagSupport
|
||||
if (collection == -1) sb.append(" selected=\"selected\"");
|
||||
sb.append(">").append(firstOption).append("</option>\n");
|
||||
|
||||
Iterator<Community> iter = commCollList.keySet().iterator();
|
||||
while(iter.hasNext())
|
||||
for (Collection coll : collections)
|
||||
{
|
||||
Community comm = iter.next();
|
||||
//sb.append("<optgroup label=\"").append(getCommName(comm)).append("\">\n");
|
||||
for (Collection coll : commCollList.get(comm))
|
||||
sb.append("<option value=\"").append(coll.getID()).append("\"");
|
||||
if (collection == coll.getID())
|
||||
{
|
||||
sb.append("<option value=\"").append(coll.getID()).append("\"");
|
||||
if (collection == coll.getID())
|
||||
{
|
||||
sb.append(" selected=\"selected\"");
|
||||
}
|
||||
sb.append(">").append(CollectionDropDown.collectionPath(coll)).append("</option>\n");
|
||||
sb.append(" selected=\"selected\"");
|
||||
}
|
||||
//sb.append("</optgroup>\n");
|
||||
sb.append(">").append(CollectionDropDown.collectionPath(coll)).append("</option>\n");
|
||||
}
|
||||
|
||||
sb.append("</select>\n");
|
||||
|
||||
out.print(sb.toString());
|
||||
@@ -141,45 +112,6 @@ public class SelectCollectionTag extends TagSupport
|
||||
return SKIP_BODY;
|
||||
}
|
||||
|
||||
private void addCommCollList(Community community, Map<Community,
|
||||
List<Collection>> commCollList) throws SQLException
|
||||
{
|
||||
for (Collection collection : community.getCollections())
|
||||
{
|
||||
List<Collection> clist = null;
|
||||
if (commCollList.containsKey(community))
|
||||
{
|
||||
clist = commCollList.get(community);
|
||||
}
|
||||
else
|
||||
{
|
||||
clist = new ArrayList<Collection>();
|
||||
}
|
||||
clist.add(collection);
|
||||
commCollList.put(community, clist);
|
||||
}
|
||||
|
||||
for (Community subcommunity : community.getSubcommunities())
|
||||
{
|
||||
addCommCollList(subcommunity, commCollList);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private String getCommName(Community community) throws SQLException
|
||||
{
|
||||
StringBuffer sb = new StringBuffer("");
|
||||
Community[] parents = community.getAllParents();
|
||||
for (Community parent : parents)
|
||||
{
|
||||
sb.insert(0, parent.getMetadata("name")+"/");
|
||||
}
|
||||
sb.append(community.getMetadata("name"));
|
||||
|
||||
return sb.toString().substring(1);
|
||||
}
|
||||
|
||||
|
||||
public String getKlass()
|
||||
{
|
||||
return klass;
|
||||
|
@@ -14,6 +14,7 @@ import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.app.webui.util.UIUtil;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
@@ -31,6 +32,7 @@ import org.dspace.content.Community;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.core.Utils;
|
||||
import org.dspace.discovery.configuration.TagCloudConfiguration;
|
||||
|
||||
/**
|
||||
@@ -88,6 +90,25 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
|
||||
String month = request.getParameter("month");
|
||||
String year = request.getParameter("year");
|
||||
String startsWith = request.getParameter("starts_with");
|
||||
//validate input to avoid cross-site scripting
|
||||
try {
|
||||
if (StringUtils.isNotBlank(month) && !"-1".equals(month)) {
|
||||
Integer.valueOf(month);
|
||||
}
|
||||
if (StringUtils.isNotBlank(year) && !"-1".equals(year)) {
|
||||
Integer.valueOf(year);
|
||||
}
|
||||
if(StringUtils.isNotBlank(startsWith)) {
|
||||
startsWith = Utils.addEntities(startsWith);
|
||||
}
|
||||
}
|
||||
catch(Exception ex) {
|
||||
log.warn("We were unable to parse the browse request: maybe a cross-site scripting attach?");
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
|
||||
String valueFocus = request.getParameter("vfocus");
|
||||
String valueFocusLang = request.getParameter("vfocus_lang");
|
||||
String authority = request.getParameter("authority");
|
||||
@@ -110,12 +131,14 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
|
||||
|
||||
// process the input, performing some inline validation
|
||||
BrowseIndex bi = null;
|
||||
if (type != null && !"".equals(type))
|
||||
if (StringUtils.isNotEmpty(type))
|
||||
{
|
||||
bi = BrowseIndex.getBrowseIndex(type);
|
||||
}
|
||||
|
||||
if (bi == null)
|
||||
// don't override a requested index, if no index is set,
|
||||
// try to find it on a possibly specified sort option.
|
||||
if (type == null && bi == null)
|
||||
{
|
||||
if (sortBy > 0)
|
||||
{
|
||||
@@ -168,7 +191,7 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
|
||||
}
|
||||
|
||||
// if no resultsperpage set, default to 20 - if tag cloud enabled, leave it as is!
|
||||
if (resultsperpage < 0 && !bi.isTagCloudEnabled())
|
||||
if (bi != null && resultsperpage < 0 && !bi.isTagCloudEnabled())
|
||||
{
|
||||
resultsperpage = 20;
|
||||
}
|
||||
|
@@ -65,9 +65,16 @@ public class BrowserServlet extends AbstractBrowserServlet
|
||||
// all browse requests currently come to GET.
|
||||
BrowserScope scope = getBrowserScopeForRequest(context, request, response);
|
||||
|
||||
if (scope.getBrowseIndex() == null)
|
||||
if (scope == null || scope.getBrowseIndex() == null)
|
||||
{
|
||||
throw new ServletException("There is no browse index for the request");
|
||||
String requestURL = request.getRequestURI();
|
||||
if (request.getQueryString() != null)
|
||||
{
|
||||
requestURL += "?" + request.getQueryString();
|
||||
}
|
||||
log.warn("We were unable to parse the browse request (e.g. an unconfigured index or sort option was used). Will send a 400 Bad Request. Requested URL was: " + requestURL);
|
||||
response.sendError(HttpServletResponse.SC_BAD_REQUEST);
|
||||
return;
|
||||
}
|
||||
|
||||
// Is this a request to export the metadata, or a normal browse request?
|
||||
|
@@ -55,7 +55,7 @@ public class DisplayStatisticsServlet extends DSpaceServlet
|
||||
{
|
||||
|
||||
// is the statistics data publically viewable?
|
||||
boolean privatereport = ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin");
|
||||
boolean privatereport = ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin.usage");
|
||||
|
||||
// is the user a member of the Administrator (1) group?
|
||||
boolean admin = Group.isMember(context, 1);
|
||||
|
@@ -19,6 +19,7 @@ import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.StringEscapeUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.app.util.GoogleMetadata;
|
||||
import org.dspace.app.webui.util.Authenticate;
|
||||
@@ -129,8 +130,7 @@ public class HandleServlet extends DSpaceServlet
|
||||
|
||||
if (dso == null)
|
||||
{
|
||||
log.info(LogManager
|
||||
.getHeader(context, "invalid_id", "path=" + path));
|
||||
log.info(LogManager.getHeader(context, "invalid_id", "path=" + path));
|
||||
JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
|
||||
|
||||
return;
|
||||
@@ -152,8 +152,48 @@ public class HandleServlet extends DSpaceServlet
|
||||
// and firing a usage event for the DSO we're reporting for
|
||||
return;
|
||||
|
||||
} else if ("/display-statistics.jsp".equals(extraPathInfo))
|
||||
{
|
||||
request.getRequestDispatcher(extraPathInfo).forward(request, response);
|
||||
// If we don't return here, we keep processing and end up
|
||||
// throwing a NPE when checking community authorization
|
||||
// and firing a usage event for the DSO we're reporting for
|
||||
return;
|
||||
} else if ("/browse".equals((extraPathInfo)) || StringUtils.startsWith(extraPathInfo, "/browse?")) {
|
||||
// Add the location if we got a community or collection
|
||||
if (dso instanceof Community)
|
||||
{
|
||||
Community c = (Community) dso;
|
||||
request.setAttribute("dspace.community", c);
|
||||
} else if (dso instanceof Collection)
|
||||
{
|
||||
Collection c = (Collection) dso;
|
||||
request.setAttribute("dspace.collection", c);
|
||||
}
|
||||
request.getRequestDispatcher(extraPathInfo).forward(request, response);
|
||||
// If we don't return here, we keep processing and end up
|
||||
// throwing a NPE when checking community authorization
|
||||
// and firing a usage event for the DSO we're reporting for
|
||||
return;
|
||||
} else if ("/simple-search".equals(extraPathInfo) || StringUtils.startsWith(extraPathInfo, "simple-search?")) {
|
||||
// Add the location if we got a community or collection
|
||||
if (dso instanceof Community)
|
||||
{
|
||||
Community c = (Community) dso;
|
||||
request.setAttribute("dspace.community", c);
|
||||
} else if (dso instanceof Collection)
|
||||
{
|
||||
Collection c = (Collection) dso;
|
||||
request.setAttribute("dspace.collection", c);
|
||||
}
|
||||
request.getRequestDispatcher(extraPathInfo).forward(request, response);
|
||||
// If we don't return here, we keep processing and end up
|
||||
// throwing a NPE when checking community authorization
|
||||
// and firing a usage event for the DSO we're reporting for
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
// OK, we have a valid Handle. What is it?
|
||||
if (dso.getType() == Constants.ITEM)
|
||||
{
|
||||
@@ -195,9 +235,9 @@ public class HandleServlet extends DSpaceServlet
|
||||
}
|
||||
else
|
||||
{
|
||||
// Forward to another servlet
|
||||
request.getRequestDispatcher(extraPathInfo).forward(request,
|
||||
response);
|
||||
log.debug("Found Item with extraPathInfo => Error.");
|
||||
JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
|
||||
return;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -231,9 +271,9 @@ public class HandleServlet extends DSpaceServlet
|
||||
}
|
||||
else
|
||||
{
|
||||
// Forward to another servlet
|
||||
request.getRequestDispatcher(extraPathInfo).forward(request,
|
||||
response);
|
||||
log.debug("Found Collection with extraPathInfo => Error.");
|
||||
JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
else if (dso.getType() == Constants.COMMUNITY)
|
||||
@@ -255,9 +295,9 @@ public class HandleServlet extends DSpaceServlet
|
||||
}
|
||||
else
|
||||
{
|
||||
// Forward to another servlet
|
||||
request.getRequestDispatcher(extraPathInfo).forward(request,
|
||||
response);
|
||||
log.debug("Found Community with extraPathInfo => Error.");
|
||||
JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@@ -471,7 +471,7 @@ public class RegisterServlet extends DSpaceServlet
|
||||
// Need to create new eperson
|
||||
// FIXME: TEMPORARILY need to turn off authentication, as usually
|
||||
// only site admins can create e-people
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
eperson = EPerson.create(context);
|
||||
eperson.setEmail(email);
|
||||
if (netid!=null)
|
||||
@@ -479,7 +479,7 @@ public class RegisterServlet extends DSpaceServlet
|
||||
eperson.setNetid(netid.toLowerCase());
|
||||
}
|
||||
eperson.update();
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
// Now set the current user of the context
|
||||
|
@@ -9,6 +9,7 @@ package org.dspace.app.webui.servlet.admin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
@@ -16,6 +17,7 @@ import java.util.Map;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import org.apache.commons.lang.time.DateUtils;
|
||||
|
||||
import org.dspace.app.util.AuthorizeUtil;
|
||||
import org.dspace.app.webui.servlet.DSpaceServlet;
|
||||
@@ -493,6 +495,20 @@ public class AuthorizeAdminServlet extends DSpaceServlet
|
||||
.getIntParameter(request, "collection_id");
|
||||
int communityId = UIUtil.getIntParameter(request, "community_id");
|
||||
int itemId = UIUtil.getIntParameter(request, "item_id");
|
||||
Date startDate = null;
|
||||
try {
|
||||
startDate = DateUtils.parseDate(request.getParameter("policy_start_date"),
|
||||
new String[]{"yyyy-MM-dd", "yyyy-MM", "yyyy"});
|
||||
} catch (Exception ex) {
|
||||
//Ignore start date is already null
|
||||
}
|
||||
Date endDate = null;
|
||||
try {
|
||||
endDate = DateUtils.parseDate(request.getParameter("policy_end_date"),
|
||||
new String[]{"yyyy-MM-dd", "yyyy-MM", "yyyy"});
|
||||
} catch (Exception ex) {
|
||||
//Ignore end date is already null
|
||||
}
|
||||
|
||||
Item item = null;
|
||||
Collection collection = null;
|
||||
@@ -574,6 +590,11 @@ public class AuthorizeAdminServlet extends DSpaceServlet
|
||||
// modify the policy
|
||||
policy.setAction(actionId);
|
||||
policy.setGroup(group);
|
||||
// start and end dates are used for Items and Bitstreams only.
|
||||
// Set start and end date even if they are null to be able to
|
||||
// delete previously set dates.
|
||||
policy.setStartDate(startDate);
|
||||
policy.setEndDate(endDate);
|
||||
policy.update();
|
||||
|
||||
// show edit form!
|
||||
|
@@ -26,7 +26,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileUploadBase.FileSizeLimitExceededException;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.AuthorizeUtil;
import org.dspace.app.util.Util;
@@ -46,10 +46,13 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.authority.Choices;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.license.CCLicense;
import org.dspace.license.CCLookup;
import org.dspace.license.CreativeCommons;

/**
@@ -93,6 +96,9 @@ public class EditItemServlet extends DSpaceServlet
/** User confirms withdrawal of item */
public static final int PUBLICIZE = 11;

/** User updates Creative Commons License */
public static final int UPDATE_CC = 12;

/** Logger */
private static Logger log = Logger.getLogger(EditCommunitiesServlet.class);

@@ -181,7 +187,7 @@ public class EditItemServlet extends DSpaceServlet

return;
}

/*
* Respond to submitted forms. Each form includes an "action" parameter
* indicating what needs to be done (from the constants above.)
@@ -190,7 +196,14 @@ public class EditItemServlet extends DSpaceServlet

Item item = Item.find(context, UIUtil.getIntParameter(request,
"item_id"));

if (request.getParameter("submit_cancel_cc") != null)
{
showEditForm(context, request, response, item);

return;
}

String handle = HandleManager.findHandle(context, item);

// now check to see if person can edit item
@@ -352,7 +365,60 @@ public class EditItemServlet extends DSpaceServlet
context.complete();

break;

case UPDATE_CC:

Map<String, String> map = new HashMap<String, String>();
String licenseclass = (request.getParameter("licenseclass_chooser") != null) ? request.getParameter("licenseclass_chooser") : "";
String jurisdiction = (ConfigurationManager.getProperty("cc.license.jurisdiction") != null) ? ConfigurationManager.getProperty("cc.license.jurisdiction") : "";
if (licenseclass.equals("standard")) {
map.put("commercial", request.getParameter("commercial_chooser"));
map.put("derivatives", request.getParameter("derivatives_chooser"));
} else if (licenseclass.equals("recombo")) {
map.put("sampling", request.getParameter("sampling_chooser"));
}
map.put("jurisdiction", jurisdiction);
CreativeCommons.MdField uriField = CreativeCommons.getCCField("uri");
CreativeCommons.MdField nameField = CreativeCommons.getCCField("name");

boolean exit = false;
if (licenseclass.equals("webui.Submission.submit.CCLicenseStep.no_license"))
{
CreativeCommons.removeLicense(context, uriField, nameField, item);

item.update();
context.commit();
exit = true;
}
else if (licenseclass.equals("webui.Submission.submit.CCLicenseStep.select_change")) {
//none
exit = true;
}

if (!exit) {
CCLookup ccLookup = new CCLookup();
ccLookup.issue(licenseclass, map, ConfigurationManager.getProperty("cc.license.locale"));
if (ccLookup.isSuccess()) {
CreativeCommons.removeLicense(context, uriField, nameField, item);

uriField.addItemValue(item, ccLookup.getLicenseUrl());
if (ConfigurationManager.getBooleanProperty("cc.submit.addbitstream")) {
CreativeCommons.setLicenseRDF(context, item, ccLookup.getRdf());
}
if (ConfigurationManager.getBooleanProperty("cc.submit.setname")) {
nameField.addItemValue(item, ccLookup.getLicenseName());
}

item.update();
context.commit();
}
}
showEditForm(context, request, response, item);
context.complete();

break;

default:

// Erm... weird action value received.
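For reference, the answer map that the UPDATE_CC branch hands to CCLookup.issue() can be built in isolation; the helper class and method below are invented for illustration and only mirror the keys used above ("commercial", "derivatives", "sampling", "jurisdiction"):

    import java.util.HashMap;
    import java.util.Map;

    public class CCAnswersSketch
    {
        // Mirrors the map-building logic of the UPDATE_CC case: the "standard"
        // class asks the commercial/derivatives questions, the "recombo"
        // (sampling) class asks the sampling question, and the jurisdiction is
        // always supplied (empty string when cc.license.jurisdiction is unset).
        public static Map<String, String> buildAnswers(String licenseClass,
                String commercial, String derivatives, String sampling,
                String jurisdiction)
        {
            Map<String, String> answers = new HashMap<String, String>();
            if ("standard".equals(licenseClass))
            {
                answers.put("commercial", commercial);
                answers.put("derivatives", derivatives);
            }
            else if ("recombo".equals(licenseClass))
            {
                answers.put("sampling", sampling);
            }
            answers.put("jurisdiction", jurisdiction == null ? "" : jurisdiction);
            return answers;
        }
    }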
@@ -403,20 +469,6 @@ public class EditItemServlet extends DSpaceServlet
HttpServletResponse response, Item item) throws ServletException,
IOException, SQLException, AuthorizeException
{
if ( request.getParameter("cc_license_url") != null )
{
// check authorization
AuthorizeUtil.authorizeManageCCLicense(context, item);

// turn off auth system to allow replace also to user that can't
// remove/add bitstream to the item
context.turnOffAuthorisationSystem();
// set or replace existing CC license
CreativeCommons.setLicense( context, item,
request.getParameter("cc_license_url") );
context.restoreAuthSystemState();
context.commit();
}

// Get the handle, if any
String handle = HandleManager.findHandle(context, item);
@@ -550,7 +602,10 @@ public class EditItemServlet extends DSpaceServlet
request.setAttribute("collections", collections);
request.setAttribute("dc.types", types);
request.setAttribute("metadataFields", metadataFields);

if(response.isCommitted()) {
return;
}
JSPManager.showJSP(request, response, "/tools/edit-item-form.jsp");
}
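The isCommitted() guard added at -550 avoids an IllegalStateException when an earlier error page, for example after an oversized upload, has already been sent. A minimal sketch of the same guard in a plain servlet (the class name and JSP path are invented for illustration):

    import java.io.IOException;
    import javax.servlet.ServletException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    public class GuardedForwardSketch extends HttpServlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response)
                throws ServletException, IOException
        {
            // If the container has already flushed headers/body to the client,
            // forwarding again would throw IllegalStateException, so bail out.
            if (response.isCommitted())
            {
                return;
            }
            request.getRequestDispatcher("/tools/edit-item-form.jsp")
                   .forward(request, response);
        }
    }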
@@ -789,6 +844,19 @@ public class EditItemServlet extends DSpaceServlet
{
// Show cc-edit page
request.setAttribute("item", item);

boolean exists = CreativeCommons.hasLicense(context, item);
request.setAttribute("cclicense.exists", Boolean.valueOf(exists));

String ccLocale = ConfigurationManager.getProperty("cc.license.locale");
/** Default locale to 'en' */
ccLocale = (StringUtils.isNotBlank(ccLocale)) ? ccLocale : "en";
request.setAttribute("cclicense.locale", ccLocale);

CCLookup cclookup = new CCLookup();
java.util.Collection<CCLicense> collectionLicenses = cclookup.getLicenses(ccLocale);
request.setAttribute("cclicense.licenses", collectionLicenses);

JSPManager
.showJSP(request, response, "/tools/creative-commons-edit.jsp");
}
@@ -7,6 +7,7 @@
*/
package org.dspace.app.webui.submit.step;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
@@ -18,8 +19,11 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.LicenseUtils;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.license.CCLicense;
import org.dspace.license.CCLookup;
import org.dspace.license.CreativeCommons;
import org.dspace.submit.step.LicenseStep;

@@ -28,6 +32,8 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Iterator;

/**
* License step for DSpace JSP-UI. Presents the user with license information
@@ -104,6 +110,15 @@ public class JSPCCLicenseStep extends JSPStep
boolean exists = CreativeCommons.hasLicense(context, item);
request.setAttribute("cclicense.exists", Boolean.valueOf(exists));

String ccLocale = ConfigurationManager.getProperty("cc.license.locale");
/** Default locale to 'en' */
ccLocale = (StringUtils.isNotBlank(ccLocale)) ? ccLocale : "en";
request.setAttribute("cclicense.locale", ccLocale);

CCLookup cclookup = new CCLookup();
Collection<CCLicense> collectionLicenses = cclookup.getLicenses(ccLocale);
request.setAttribute("cclicense.licenses", collectionLicenses);

JSPStepManager.showJSP(request, response, subInfo, CC_LICENSE_JSP);

}
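EditItemServlet and JSPCCLicenseStep above resolve the Creative Commons locale with the same blank-check-plus-"en" fallback. A small hypothetical helper (class and method names invented) that both call sites could share:

    import org.apache.commons.lang3.StringUtils;
    import org.dspace.core.ConfigurationManager;

    public final class CCLocaleSketch
    {
        private CCLocaleSketch() { }

        // Returns the configured cc.license.locale, falling back to "en"
        // when the property is missing or blank, exactly as both hunks above do.
        public static String resolveCCLocale()
        {
            String ccLocale = ConfigurationManager.getProperty("cc.license.locale");
            return StringUtils.isNotBlank(ccLocale) ? ccLocale : "en";
        }
    }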
@@ -127,7 +127,11 @@
</filter-mapping>

<!-- kernel start listener (from impl), starts up the kernel for standalone webapps -->
<!--
DSpace Kernel startup listener. This listener is in charge of initializing/starting the
DSpace Kernel. It MUST be listed BEFORE any other DSpace listeners, as DSpace services
will not function until the Kernel is initialized.
-->
<listener>
<listener-class>org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener</listener-class>
</listener>