Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 10:04:21 +00:00)

Compare commits: alert-auto...dspace-5.8 (338 commits)
Commit SHAs:

50eca14e9f 51b74510b9 f96185dcea 98ac9ed3ce 50ac3b6819 ec8e839ef5
3832acc63e 12f978ecee 9981dfcacd ec5056750f 266d016653 4f0e0aec5e
f673b8da37 e55212c14f 9401d971f6 090b617c28 504e2ae270 99683cb810
dab9bd40ed 7bbeea2633 0182392563 40d5f113a9 97b22916f4 711b4e8a96
778f9dfec0 15046de363 ed8b31721f 9edd2cd218 a7bed3a293 d917b3158d
aef0f52a5b 09713ea4a8 54f5cd87fc c8f62e6f49 bdf665b07e 8a790dedd3
23aa21ae86 9f531fb244 5a1943cf22 2a627d8bbd 632a55d894 b434b999b7
3963d3929e f81cdf5283 dd7502f758 d557c019f2 7467741624 91a00e237c
f45252547d d2c123d8c9 943619248a 848aea9b27 b8e784f8c2 0b9d05154c
5a81ba0f3b 63ab1f13f8 19d8144faa 7d53df0d6b 2bd6c2e392 b0e624d72c
b37bd18c51 51bb72da2a 3c8ecb5d1f 59302e9d6f 567ec083c8 329f3b48a6
a52779c571 9a0334da7f 8e4db1344e a9b8cca20f 93f368ff6b fbc023019c
e00dc3d421 77a4da32ec 79014ed943 0dbaa81b54 c36e6f9f02 f7b6c83e99
2510609f68 03724151be 52db795b72 5f3f552078 39f4db91da 04ba49ba56
1aa92f8d00 85f2195396 c5cdedb0c6 b805aaf1dd da315a4911 ea4e3ee857
1c4089c6b2 e8a06006ae 9e0208fa96 76d6dec743 427ba190a6 bdd4eb20dc
c7cbd44330 50a4f046d4 b5330b7815 4fed285c83 9390016397 b3c7f0a7f1
8da8431869 2549e643f9 ac0721767b 679c971ec3 b50d35d3f3 d6412e9af3
067c1b1a95 20026af124 b3f9ea0eaa 987a16d23f 43d44aa0cc 307d577b35
04c60ba939 462360ed4d c6fda557f7 e73f83f7a4 9f0f5940e7 88ed833e2c
91d4081b03 d9e986d669 132f37a10a 98a26fa3e7 4f5f5acdbe 212011cc75
e7b49d8310 a70f0bdd22 a84763a258 5a1028a7a9 16b123e9df f057ed8c07
875bb59eb0 2c09aea8fd 533245c8dd 875bba3add 55e623d1c2 81a6d173ca
3ff604742b 3bfe7b8ea8 ee62f9d6f0 be35b0450b 8c94edc29c 2bf0275678
86ca33eaa3 f64d4b3367 c908997900 e2dd1089c9 8809150e66 1fd2723848
454f40b3f4 f05c9e794f 56fc41cac3 0175e5edff d17886c1cd 06668c363e
4b3a07120c 50c4a54bd6 0aabf5d780 04ce6ff2f4 1f8f6241c2 4a2f392ed8
fac705ec3f e1263249f5 553b1a72c5 6242865207 59fa31641a 58344b610f
563d90f7c4 131555604a fbde108024 2c59a9dd35 d307c56d07 1d2b954889
69cfc61167 b944ceb112 9885ed851a 52ce1eb52b deeef45943 ad21875ac8
4ee79a3d89 c01c3af153 f493a475fd a3a5f562c9 3479b0a254 39289b6762
edf7ea6524 2045fee8ab bac9beaffa 569ad5f546 b465f26646 ad19c3aeb6
34c20d49ad eaa08adb62 15f3c247bc 2a44765f39 87c34f1f1c fce84880bc
3f94c3acb4 50cb865ea2 a9b8d8bfbc 600f680cd6 01d7d060d7 4a6663c2f4
b3c87b2be7 ac08b6a4e3 a2f5fe34eb ace19199e5 6d9fa26535 3efe549774
734744ec4f 829c30bab4 83cb04ed53 0911d60290 9bb7036857 e0368f3ade
660217c3f9 5f13b8cc64 a2caabc79a cb9710cda4 56abebaece 0310db74aa
3e1bac69df ec86af5a82 79e111996b f4c6f2680c f3487be040 87d0770974
1c9fa656aa 59ff964f4f 10c4661885 afe9c1294f 7a54972ed1 b2cb0ef4dd
5edf641d6c d9b14a86f0 7b8fa49632 b5540d5999 494ff0c4c1 1c4c8943a9
5cd56fb834 ed89d6b00e 19b28f4734 4a8fdf6843 d040b9dd4e 4036bf781a
d011e24f74 0e9f78e9df 254097b2e2 8049cef23b de842dbf30 8bcac58154
511b78277f dbd019943a 7d8a9d5636 2ab6b10a03 cd7789e8df 9287aa891f
a99203382c 6ec649df78 e9f4e4c2cc 18cc6bb3ff 8094d8fe18 b7a469d53c
f168c6c33d 981b62d9e9 2c42d71a6a ca6bc57c6d 0f0be17d0a 5e5a7922d0
bb4cb39373 a257f516fa 9d8284d85f 57efa4f628 5b5f44085a 46ce2741bc
0b799fc882 04b57a60b3 02b4314046 3d79fa76ab ca1803ae93 9046ec21d4
b30654e3d5 ee19e11e6d a990c97959 56816b13ba b414aaa195 1a1ae35ec9
1029f393e4 c1039dfe26 cc96646e37 d2ad7c81de 00e9c1131f 77cc9abe49
91018bfe0f 7f9bcb283f ae11c1c795 9cd5fa596b e10b10224a e08886ae09
df3ffcf7f9 0c77f7be91 cdc8e3144e 92847079d7 b023c36941 aee3b0b710
d0c8afb601 e9c14bbcea 2eca19daa3 bcc7a75baa 19222e9341 8124a61738
09007146d0 e715c64404 53ff4510ac 495031001d 97e89384f1 72913cda76
03097aaa35 f6d3f67b52 62e0ac462e 54310b014b beaf54f624 114f1e0985
1fdfe05c4c 9c1f91d40b 39711b332f 6cfda147b4 eabdc610a0 da74f5aa7e
14c575a7c4 d8c8d28c13 bf56f1f7e3 8046d154ee 589117e204 e9e5423f97
c08f447cec cf25175155
.gitignore (vendored): 1 change
@@ -3,6 +3,7 @@ target/
 
 ## Ignore project files created by Eclipse
 .settings/
+/bin/
 .project
 .classpath
@@ -1,4 +1,5 @@
 language: java
+sudo: false
 
 env:
 # Give Maven 1GB of memory to work with
@@ -6,6 +7,8 @@ env:
 
 # Install prerequisites for building Mirage2 more rapidly
 before_install:
+# Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
+- rm ~/.m2/settings.xml
 # Install latest Node.js 0.10.x & print version info
 - nvm install 0.10
 - node --version
@@ -17,10 +20,10 @@ before_install:
 # Print ruby version info (should be installed)
 - ruby -v
 # Install Sass & print version info
-- gem install sass
+- gem install sass -v 3.3.14
 - sass -v
 # Install Compass & print version info
-- gem install compass
+- gem install compass -v 1.0.1
 - compass version
 
 # Skip install stage, as we'll do it below
LICENSE: 2 changes
@@ -1,7 +1,7 @@
 DSpace source code license:
 
-Copyright (c) 2002-2013, DuraSpace. All rights reserved.
+Copyright (c) 2002-2017, DuraSpace. All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are
@@ -266,22 +266,24 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
 * Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
 * Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
 * databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
-* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
-* DSpace JSP-UI (org.dspace:dspace-jspui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
-* DSpace OAI-PMH (org.dspace:dspace-oai:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
-* DSpace RDF (org.dspace:dspace-rdf:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
-* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.0-rc4-SNAPSHOT - http://demo.dspace.org)
-* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
-* Apache Solr Webapp (org.dspace:dspace-solr:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
-* DSpace SWORD (org.dspace:dspace-sword:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
-* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
-* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
+* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
+* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.4 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
+* DSpace JSP-UI (org.dspace:dspace-jspui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
+* DSpace OAI-PMH (org.dspace:dspace-oai:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
+* DSpace RDF (org.dspace:dspace-rdf:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
+* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.3-SNAPSHOT - http://demo.dspace.org)
+* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
+* Apache Solr Webapp (org.dspace:dspace-solr:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
+* DSpace SWORD (org.dspace:dspace-sword:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
+* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
+* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
+* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.5 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
 * handle (org.dspace:handle:6.2 - no url defined)
 * jargon (org.dspace:jargon:1.4.25 - no url defined)
 * mets (org.dspace:mets:1.5.2 - no url defined)
 * oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
 * Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
-* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
+* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.3-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
 * Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
 * Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
 * JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
@@ -386,8 +388,3 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
 * Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined)
 * Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
 * XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)
-
-Unknown license:
-
-* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
-* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
@@ -66,14 +66,12 @@ db.password=dspace
 #db.username=dspace
 #db.password=dspace
 
-# Schema name - if your database contains multiple schemas, you can avoid problems with
-# retrieving the definitions of duplicate object names by specifying
-# the schema name here that is used for DSpace by uncommenting the following entry
-
-# NOTE: this configuration option is for PostgreSQL only. For Oracle, schema is equivalent
-# to user name. DSpace depends on the PostgreSQL understanding of schema. If you are using
-# Oracle, just leave this this value blank.
+# Schema name - if your database contains multiple schemas, you can avoid
+# problems with retrieving the definitions of duplicate object names by
+# specifying the schema name that is used for DSpace.
+# ORACLE USAGE NOTE: In Oracle, schema is equivalent to "username". This means
+# specifying a "db.schema" is often unnecessary (i.e. you can leave it blank),
+# UNLESS your Oracle DB Account (in db.username) has access to multiple schemas.
 db.schema =
 
 # Maximum number of DB connections in pool
@@ -12,7 +12,7 @@
 <parent>
 <groupId>org.dspace</groupId>
 <artifactId>dspace-parent</artifactId>
-<version>5.0</version>
+<version>5.8</version>
 <relativePath>..</relativePath>
 </parent>
@@ -603,13 +603,21 @@
 <groupId>com.google.apis</groupId>
 <artifactId>google-api-services-analytics</artifactId>
 </dependency>
+<dependency>
+<groupId>com.google.api-client</groupId>
+<artifactId>google-api-client</artifactId>
+</dependency>
+<dependency>
+<groupId>com.google.http-client</groupId>
+<artifactId>google-http-client</artifactId>
+</dependency>
+<dependency>
+<groupId>com.google.http-client</groupId>
+<artifactId>google-http-client-jackson2</artifactId>
+</dependency>
 <dependency>
 <groupId>com.google.oauth-client</groupId>
-<artifactId>google-oauth-client-jetty</artifactId>
+<artifactId>google-oauth-client</artifactId>
 </dependency>
 <!-- FindBugs -->
 <dependency>
@@ -113,8 +113,8 @@ public class CommunityFiliator
 CommunityFiliator filiator = new CommunityFiliator();
 Context c = new Context();
 
-// ve are superuser!
-c.setIgnoreAuthorization(true);
+// we are superuser!
+c.turnOffAuthorisationSystem();
 
 try
 {
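The same change recurs throughout this comparison: the boolean authorization toggle is replaced by the paired turn-off/restore calls on org.dspace.core.Context. A minimal sketch of the pattern, against the DSpace 5.x Context API; the privileged work in the middle is a placeholder:

```java
import org.dspace.core.Context;

public class AuthToggleSketch {
    // Sketch only. The paired calls restore the *previous* state on exit,
    // so nested privileged sections behave correctly, unlike the old
    // setIgnoreAuthorization(true/false) pair.
    public static void runPrivileged(Context context) throws Exception {
        context.turnOffAuthorisationSystem(); // was: context.setIgnoreAuthorization(true)
        try {
            // ... privileged work (imports, exports, policy fixes) ...
        } finally {
            context.restoreAuthSystemState(); // was: context.setIgnoreAuthorization(false)
        }
    }
}
```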
@@ -228,7 +228,7 @@ public final class CreateAdministrator
 {
 // Of course we aren't an administrator yet so we need to
 // circumvent authorisation
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 
 // Find administrator group
 Group admins = Group.find(context, 1);
@@ -88,7 +88,7 @@ public class MetadataExporter
 {
 // create a context
 Context context = new Context();
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 
 OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
 xmlFormat.setLineWidth(120);
@@ -1169,10 +1169,8 @@ public class MetadataImport
 */
 private static boolean isAuthorityControlledField(String md)
 {
-int pos = md.indexOf("[");
-String mdf = (pos > -1 ? md.substring(0, pos) : md);
-pos = md.indexOf(":");
-mdf = (pos > -1 ? md.substring(pos+1) : md);
+String mdf = StringUtils.substringAfter(md, ":");
+mdf = StringUtils.substringBefore(mdf, "[");
 return authorityControlled.contains(mdf);
 }
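A quick illustration of what the two StringUtils calls in that hunk do, with a hypothetical heading string (the real heading grammar is defined by MetadataImport, not by this sketch); commons-lang and commons-lang3 behave identically for these calls:

```java
import org.apache.commons.lang.StringUtils;

public class HeadingParseExample {
    public static void main(String[] args) {
        String md = "value:dc.contributor.author[en]"; // hypothetical heading

        // Everything after the first ':' (returns "" if there is no ':').
        String mdf = StringUtils.substringAfter(md, ":"); // "dc.contributor.author[en]"
        // Everything before the first '[' (returns the input unchanged if no '[').
        mdf = StringUtils.substringBefore(mdf, "[");      // "dc.contributor.author"

        System.out.println(mdf);
    }
}
```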
|
@@ -89,16 +89,16 @@ public class MetadataImportInvalidHeadingException extends Exception
|
||||
{
|
||||
if (type == SCHEMA)
|
||||
{
|
||||
return "Unknown metadata schema in row " + column + ": " + badHeading;
|
||||
return "Unknown metadata schema in column " + column + ": " + badHeading;
|
||||
} else if (type == ELEMENT)
|
||||
{
|
||||
return "Unknown metadata element in row " + column + ": " + badHeading;
|
||||
return "Unknown metadata element in column " + column + ": " + badHeading;
|
||||
} else if (type == MISSING)
|
||||
{
|
||||
return "Row with missing header: Row " + column;
|
||||
return "Row with missing header: column " + column;
|
||||
} else
|
||||
{
|
||||
return "Bad metadata declaration in row " + column + ": " + badHeading;
|
||||
return "Bad metadata declaration in column" + column + ": " + badHeading;
|
||||
}
|
||||
}
|
||||
}
|
@@ -209,7 +209,7 @@ public class ItemExport
 }
 
 Context c = new Context();
-c.setIgnoreAuthorization(true);
+c.turnOffAuthorisationSystem();
 
 if (myType == Constants.ITEM)
 {
@@ -284,7 +284,7 @@ public class ItemImport
 if (line.hasOption('z'))
 {
 zip = true;
-zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
+zipfilename = line.getOptionValue('z');
 }
 
 //By default assume collections will be given on the command line
@@ -733,7 +733,7 @@ public class ItemImport
 {
 clist = mycollections;
 }
-addItem(c, mycollections, sourceDir, dircontents[i], mapOut, template);
+addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
 System.out.println(i + " " + dircontents[i]);
 c.clearCache();
 }
@@ -1108,6 +1108,10 @@ public class ItemImport
 {
 value = "";
 }
+else
+{
+value = value.trim();
+}
 // //getElementData(n, "element");
 String element = getAttributeValue(n, "element");
 String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
@@ -1129,8 +1133,8 @@ public class ItemImport
 {
 qualifier = null;
 }
 
-if (!isTest)
+// only add metadata if it is no test and there is an real value
+if (!isTest && !value.equals(""))
 {
 i.addMetadata(schema, element, qualifier, language, value);
 }
@@ -2148,7 +2152,7 @@ public class ItemImport
 context = new Context();
 eperson = EPerson.find(context, oldEPerson.getID());
 context.setCurrentUser(eperson);
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 
 boolean isResume = theResumeDir!=null;
@@ -351,7 +351,7 @@ public class ItemUpdate {
 
 context = new Context();
 iu.setEPerson(context, iu.eperson);
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 
 HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
 if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
@@ -362,19 +362,20 @@ public class ItemUpdate {
 iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
 
 context.complete(); // complete all transactions
-context.setIgnoreAuthorization(false);
 }
 catch (Exception e)
 {
 if (context != null && context.isValid())
 {
 context.abort();
-context.setIgnoreAuthorization(false);
 }
 e.printStackTrace();
 pr(e.toString());
 status = 1;
 }
+finally {
+context.restoreAuthSystemState();
+}
 
 if (isTest)
 {
@@ -11,6 +11,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.List;
+import java.util.TreeMap;
 import org.dspace.core.ConfigurationManager;
 import org.dspace.servicemanager.DSpaceKernelImpl;
 import org.dspace.servicemanager.DSpaceKernelInit;
@@ -275,9 +276,21 @@ public class ScriptLauncher
 */
 private static void display()
 {
 // List all command elements
 List<Element> commands = commandConfigs.getRootElement().getChildren("command");
-System.out.println("Usage: dspace [command-name] {parameters}");
 
+// Sort the commands by name.
+// We cannot just use commands.sort() because it tries to remove and
+// reinsert Elements within other Elements, and that doesn't work.
+TreeMap<String, Element> sortedCommands = new TreeMap<>();
+for (Element command : commands)
+{
+sortedCommands.put(command.getChild("name").getValue(), command);
+}
+
+// Display the sorted list
+System.out.println("Usage: dspace [command-name] {parameters}");
+for (Element command : sortedCommands.values())
 {
 System.out.println(" - " + command.getChild("name").getValue() +
 ": " + command.getChild("description").getValue());
@@ -7,9 +7,10 @@
 */
 package org.dspace.app.mediafilter;
 
+import java.io.ByteArrayInputStream;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.InputStream;
+import java.nio.file.Files;
 
 /**
@@ -30,8 +31,23 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
 throws Exception
 {
 File f = inputStreamToTempFile(source, "imthumb", ".tmp");
-File f2 = getThumbnailFile(f);
-return new FileInputStream(f2);
+File f2 = null;
+try
+{
+f2 = getThumbnailFile(f);
+byte[] bytes = Files.readAllBytes(f2.toPath());
+return new ByteArrayInputStream(bytes);
+}
+finally
+{
+//noinspection ResultOfMethodCallIgnored
+f.delete();
+if (f2 != null)
+{
+//noinspection ResultOfMethodCallIgnored
+f2.delete();
+}
+}
 }
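The pattern in this hunk (and the PDF filter below) replaces a FileInputStream over a temp file, which leaks the file because nothing deletes it after the stream is consumed, with an eager read followed by cleanup. A minimal standalone sketch of the same idea; produceThumbnail() is a hypothetical stand-in for the filter's getThumbnailFile():

```java
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;

public class TempFileStreamExample {
    // Read the result into memory, then delete both temp files in a finally
    // block, so the caller gets a stream with no filesystem residue.
    static InputStream streamAndCleanUp(File input) throws IOException {
        File thumbnail = null;
        try {
            thumbnail = produceThumbnail(input);
            byte[] bytes = Files.readAllBytes(thumbnail.toPath());
            return new ByteArrayInputStream(bytes);
        } finally {
            input.delete();
            if (thumbnail != null) {
                thumbnail.delete();
            }
        }
    }

    private static File produceThumbnail(File in) throws IOException {
        return File.createTempFile("thumb", ".jpg"); // placeholder for real work
    }
}
```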
@@ -7,24 +7,40 @@
 */
 package org.dspace.app.mediafilter;
 
+import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStream;
+import java.nio.file.Files;
 
 public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
 public InputStream getDestinationStream(InputStream source)
 throws Exception
 {
 File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
-File f2 = getImageFile(f, 0);
-File f3 = getThumbnailFile(f2);
-return new FileInputStream(f3);
+File f2 = null;
+File f3 = null;
+try
+{
+f2 = getImageFile(f, 0);
+f3 = getThumbnailFile(f2);
+byte[] bytes = Files.readAllBytes(f3.toPath());
+return new ByteArrayInputStream(bytes);
+}
+finally
+{
+//noinspection ResultOfMethodCallIgnored
+f.delete();
+if (f2 != null)
+{
+//noinspection ResultOfMethodCallIgnored
+f2.delete();
+}
+if (f3 != null)
+{
+//noinspection ResultOfMethodCallIgnored
+f3.delete();
+}
+}
 }
 
 public static final String[] PDF = {"Adobe PDF"};
 public String[] getInputMIMETypes()
 {
 return PDF;
 }
 
 }
@@ -23,6 +23,7 @@ import org.dspace.content.Bundle;
 import org.dspace.content.Item;
 import org.dspace.core.Context;
 import org.im4java.core.ConvertCmd;
+import org.im4java.core.Info;
 import org.im4java.core.IM4JavaException;
 import org.im4java.core.IMOperation;
 import org.im4java.process.ProcessStarter;
@@ -34,13 +35,15 @@ import org.dspace.core.ConfigurationManager;
 * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
 * no bigger than. Creates only JPEGs.
 */
-public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
-{
+public abstract class ImageMagickThumbnailFilter extends MediaFilter {
 private static int width = 180;
 private static int height = 120;
+private static boolean flatten = true;
 static String bitstreamDescription = "IM Thumbnail";
 static final String defaultPattern = "Generated Thumbnail";
 static Pattern replaceRegex = Pattern.compile(defaultPattern);
+static String cmyk_profile;
+static String srgb_profile;
 
 static {
 String pre = ImageMagickThumbnailFilter.class.getName();
@@ -48,15 +51,18 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
 ProcessStarter.setGlobalSearchPath(s);
 width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
 height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
+flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
 String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
+cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
+srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
 if (description != null) {
 bitstreamDescription = description;
 }
 try {
 String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
 replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
-} catch(PatternSyntaxException e) {
-System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
+} catch (PatternSyntaxException e) {
+System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
 }
 
 }
@@ -64,9 +70,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
 public ImageMagickThumbnailFilter() {
 }
 
-public String getFilteredName(String oldFilename)
-{
+public String getFilteredName(String oldFilename) {
 return oldFilename + ".jpg";
 }
 
@@ -74,24 +78,21 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
 * @return String bundle name
 *
 */
-public String getBundleName()
-{
+public String getBundleName() {
 return "THUMBNAIL";
 }
 
 /**
 * @return String bitstreamformat
 */
-public String getFormatString()
-{
+public String getFormatString() {
 return "JPEG";
 }
 
 /**
 * @return String bitstreamDescription
 */
-public String getDescription()
-{
+public String getDescription() {
 return bitstreamDescription;
 }
 
@@ -119,7 +120,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
 op.thumbnail(width, height);
 op.addImage(f2.getAbsolutePath());
 if (MediaFilterManager.isVerbose) {
-System.out.println("IM Thumbnail Param: "+op);
+System.out.println("IM Thumbnail Param: " + op);
 }
 cmd.run(op);
 return f2;
@@ -131,64 +132,65 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
 ConvertCmd cmd = new ConvertCmd();
 IMOperation op = new IMOperation();
 String s = "[" + page + "]";
-op.addImage(f.getAbsolutePath()+s);
+op.addImage(f.getAbsolutePath() + s);
+if (flatten) {
+op.flatten();
+}
+// PDFs using the CMYK color system can be handled specially if
+// profiles are defined
+if (cmyk_profile != null && srgb_profile != null) {
+Info imageInfo = new Info(f.getAbsolutePath(), true);
+String imageClass = imageInfo.getImageClass();
+if (imageClass.contains("CMYK")) {
+op.profile(cmyk_profile);
+op.profile(srgb_profile);
+}
+}
 op.addImage(f2.getAbsolutePath());
 if (MediaFilterManager.isVerbose) {
-System.out.println("IM Image Param: "+op);
+System.out.println("IM Image Param: " + op);
 }
 cmd.run(op);
 return f2;
 }
 
-public boolean preProcessBitstream(Context c, Item item, Bitstream source)
-throws Exception
-{
+public boolean preProcessBitstream(Context c, Item item, Bitstream source) throws Exception {
 String nsrc = source.getName();
-for(Bundle b: item.getBundles("THUMBNAIL")) {
-for(Bitstream bit: b.getBitstreams()) {
+for (Bundle b : item.getBundles("THUMBNAIL")) {
+for (Bitstream bit : b.getBitstreams()) {
 String n = bit.getName();
 if (n != null) {
 if (nsrc != null) {
-if (!n.startsWith(nsrc)) continue;
+if (!n.startsWith(nsrc))
+continue;
 }
 }
 String description = bit.getDescription();
-//If anything other than a generated thumbnail is found, halt processing
+// If anything other than a generated thumbnail
+// is found, halt processing
 if (description != null) {
 if (replaceRegex.matcher(description).matches()) {
 if (MediaFilterManager.isVerbose) {
-System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
+System.out.println(description + " " + nsrc
++ " matches pattern and is replacable.");
 }
 continue;
 }
 if (description.equals(bitstreamDescription)) {
 if (MediaFilterManager.isVerbose) {
-System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
+System.out.println(bitstreamDescription + " " + nsrc
++ " is replacable.");
 }
 continue;
 }
 }
-System.out.println("Custom Thumbnail exists for " + nsrc + " for item " + item.getHandle() + ". Thumbnail will not be generated. ");
+System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
++ item.getHandle() + ". Thumbnail will not be generated. ");
 return false;
 }
 }
 
-return true; //assume that the thumbnail is a custom one
+return true; // assume that the thumbnail is a custom one
 }
 
-public String[] getInputMIMETypes()
-{
-return ImageIO.getReaderMIMETypes();
-}
-
-public String[] getInputDescriptions()
-{
-return null;
-}
-
-public String[] getInputExtensions()
-{
-return ImageIO.getReaderFileSuffixes();
-}
 }
@@ -151,8 +151,11 @@ public abstract class AbstractGenerator
 * if an error occurs writing
 */
 public int finish() throws IOException
 {
+if (null != currentOutput)
+{
 closeCurrentFile();
+}
 
 OutputStream fo = new FileOutputStream(new File(outputDir,
 getIndexFilename()));
@@ -92,7 +92,7 @@ public class CreateStatReport {
 
 // create context as super user
 context = new Context();
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 
 //get paths to directories
 outputLogDirectory = ConfigurationManager.getProperty("log.dir") + File.separator;
@@ -215,7 +215,7 @@ public class LogAnalyser
 
 // create context as super user
 Context context = new Context();
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 
 // set up our command line variables
 String myLogDir = null;
@@ -151,7 +151,7 @@ public class ReportGenerator
 {
 // create context as super user
 Context context = new Context();
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 
 String myFormat = null;
 String myInput = null;
@@ -7,37 +7,31 @@
 */
 package org.dspace.app.util;
 
-import java.sql.SQLException;
-
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
-import org.dspace.authorize.AuthorizeManager;
-import org.dspace.content.*;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
 import org.apache.log4j.Logger;
+import org.dspace.authorize.AuthorizeManager;
+import org.dspace.content.Bitstream;
+import org.dspace.content.Bundle;
+import org.dspace.content.Item;
+import org.dspace.content.Metadatum;
 import org.dspace.core.ConfigurationManager;
-
-import java.io.File;
-import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.Set;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.eperson.EPerson;
 import org.dspace.handle.HandleManager;
 import org.jdom.Element;
 
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.Map.Entry;
 
 /**
 * Configuration and mapping for Google Scholar output metadata
 * @author Sands Fish
@@ -125,6 +119,7 @@ public class GoogleMetadata
 
 private static final int ALL_FIELDS_IN_OPTION = 2;
 
+private Context ourContext;
 // Load configured fields from google-metadata.properties
 static
 {
@@ -216,7 +211,11 @@ public class GoogleMetadata
 // Hold onto the item in case we need to refresh a stale parse
 this.item = item;
 itemURL = HandleManager.resolveToURL(context, item.getHandle());
+ourContext=context;
+EPerson currentUser = ourContext.getCurrentUser();
+ourContext.setCurrentUser(null);
 parseItem();
+ourContext.setCurrentUser(currentUser);
 }
 
 /**
@@ -757,16 +756,17 @@ public class GoogleMetadata
 }
 
 /**
- * Fetch all metadata mappings
+ * Fetch retaining the order of the values for any given key in which they
+ * where added (like authors).
 *
 * Usage: GoogleMetadata gmd = new GoogleMetadata(item); for(Entry<String,
 * String> mapping : googlemd.getMappings()) { ... }
 *
 * @return Iterable of metadata fields mapped to Google-formatted values
 */
-public Set<Entry<String, String>> getMappings()
+public Collection<Entry<String, String>> getMappings()
 {
-return new HashSet<>(metadataMappings.entries());
+return metadataMappings.entries();
 }
 
 /**
@@ -1041,7 +1041,6 @@ public class GoogleMetadata
 */
 private Bitstream findLinkableFulltext(Item item) throws SQLException {
 Bitstream bestSoFar = null;
-int bitstreamCount = 0;
 Bundle[] contentBundles = item.getBundles("ORIGINAL");
 for (Bundle bundle : contentBundles) {
 int primaryBitstreamId = bundle.getPrimaryBitstreamID();
@@ -1051,14 +1050,14 @@ public class GoogleMetadata
 if (isPublic(candidate)) {
 return candidate;
 }
-} else if (bestSoFar == null) {
+} else
+{
+if (bestSoFar == null && isPublic(candidate)) { //if bestSoFar is null but the candidate is not public you don't use it and try to find another
 bestSoFar = candidate;
 }
-bitstreamCount++;
 }
 }
-if (bitstreamCount > 1 || !isPublic(bestSoFar)) {
-bestSoFar = null;
-}
 
 return bestSoFar;
@@ -1069,16 +1068,10 @@ public class GoogleMetadata
 return false;
 }
 boolean result = false;
-Context context = null;
 try {
-context = new Context();
-result = AuthorizeManager.authorizeActionBoolean(context, bitstream, Constants.READ, true);
+result = AuthorizeManager.authorizeActionBoolean(ourContext, bitstream, Constants.READ, true);
 } catch (SQLException e) {
 log.error("Cannot determine whether bitstream is public, assuming it isn't. bitstream_id=" + bitstream.getID(), e);
-} finally {
-if (context != null) {
-context.abort();
-}
 }
 return result;
 }
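Two ideas drive the GoogleMetadata hunks: reuse the caller's Context (held in ourContext) instead of opening and aborting a throwaway one per check, and clear the current user around parseItem() so visibility is evaluated as the anonymous user. A minimal sketch of that second idea against the DSpace 5.x Context API; doAnonymously() and the work it wraps are illustrative names, not part of the patch:

```java
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

public class AnonymousViewExample {
    // Run a piece of work with no current user set, so authorization checks
    // inside it see the anonymous user; then restore the original user.
    static void doAnonymously(Context context, Runnable work) {
        EPerson currentUser = context.getCurrentUser();
        context.setCurrentUser(null);
        try {
            work.run();
        } finally {
            context.setCurrentUser(currentUser);
        }
    }
}
```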
@@ -279,11 +279,11 @@ public class LDAPAuthentication
 {
 log.info(LogManager.getHeader(context,
 "type=ldap-login", "type=ldap_but_already_email"));
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 eperson.setNetid(netid.toLowerCase());
 eperson.update();
 context.commit();
-context.setIgnoreAuthorization(false);
+context.restoreAuthSystemState();
 context.setCurrentUser(eperson);
 
 // assign user to groups based on ldap dn
@@ -298,7 +298,7 @@ public class LDAPAuthentication
 // TEMPORARILY turn off authorisation
 try
 {
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 eperson = EPerson.create(context);
 if (StringUtils.isNotEmpty(email))
 {
@@ -332,7 +332,7 @@ public class LDAPAuthentication
 }
 finally
 {
-context.setIgnoreAuthorization(false);
+context.restoreAuthSystemState();
 }
 
 log.info(LogManager.getHeader(context, "authenticate",
@@ -354,7 +354,7 @@ public class LDAPAuthentication
 }
 finally
 {
-context.setIgnoreAuthorization(false);
+context.restoreAuthSystemState();
 }
 }
 }
@@ -7,11 +7,7 @@
 */
 package org.dspace.authenticate;
 
-import java.sql.SQLException;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.core.ConfigurationManager;
@@ -20,6 +16,10 @@ import org.dspace.core.LogManager;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.sql.SQLException;
+
 /**
 * A stackable authentication method
 * based on the DSpace internal "EPerson" database.
@@ -128,7 +128,7 @@ public class PasswordAuthentication
 // ensures they are password users
 try
 {
-if (context.getCurrentUser().getPasswordHash() != null && !context.getCurrentUser().getPasswordHash().toString().equals(""))
+if (context.getCurrentUser() != null && context.getCurrentUser().getPasswordHash()!=null && StringUtils.isNotBlank(context.getCurrentUser().getPasswordHash().toString()))
 {
 String groupName = ConfigurationManager.getProperty("authentication-password", "login.specialgroup");
 if ((groupName != null) && (!groupName.trim().equals("")))
@@ -149,7 +149,7 @@ public class PasswordAuthentication
 }
 }
 catch (Exception e) {
-// The user is not a password user, so we don't need to worry about them
+log.error(LogManager.getHeader(context,"getSpecialGroups",""),e);
 }
 return new int[0];
 }
@@ -612,7 +612,7 @@ public class X509Authentication implements AuthenticationMethod
 "from=x.509, email=" + email));
 
 // TEMPORARILY turn off authorisation
-context.setIgnoreAuthorization(true);
+context.turnOffAuthorisationSystem();
 eperson = EPerson.create(context);
 eperson.setEmail(email);
 eperson.setCanLogIn(true);
@@ -620,7 +620,7 @@ public class X509Authentication implements AuthenticationMethod
 eperson);
 eperson.update();
 context.commit();
-context.setIgnoreAuthorization(false);
+context.restoreAuthSystemState();
 context.setCurrentUser(eperson);
 setSpecialGroupsFlag(request, email);
 return SUCCESS;
@@ -20,6 +20,7 @@ import org.dspace.eperson.Group;
 import org.dspace.storage.rdbms.DatabaseManager;
 import org.dspace.storage.rdbms.TableRow;
 import org.dspace.storage.rdbms.TableRowIterator;
+import org.dspace.workflow.WorkflowItem;
 
 /**
 * AuthorizeManager handles all authorization checks for DSpace. For better
@@ -295,8 +296,43 @@
 }
 }
 
+// In case the dso is an bundle or bitstream we must ignore custom
+// policies if it does not belong to at least one installed item (see
+// DS-2614).
+// In case the dso is an item and a corresponding workspace or workflow
+// item exist, we have to ignore custom policies (see DS-2614).
+boolean ignoreCustomPolicies = false;
+if (o instanceof Bitstream)
+{
+Bitstream b = (Bitstream) o;
+
+// Ensure that this is not a collection or community logo
+DSpaceObject parent = b.getParentObject();
+if (!(parent instanceof Collection) && !(parent instanceof Community))
+{
+ignoreCustomPolicies = !isAnyItemInstalled(c, b.getBundles());
+}
+}
+if (o instanceof Bundle)
+{
+ignoreCustomPolicies = !isAnyItemInstalled(c, new Bundle[] {(Bundle) o});
+}
+if (o instanceof Item)
+{
+if (WorkspaceItem.findByItem(c, (Item) o) != null ||
+WorkflowItem.findByItem(c, (Item) o) != null)
+{
+ignoreCustomPolicies = true;
+}
+}
+
 for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action))
 {
+if (ignoreCustomPolicies
+&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
+{
+continue;
+}
 // check policies for date validity
 if (rp.isDateValid())
 {
@@ -306,7 +342,7 @@
 }
 
 if ((rp.getGroupID() != -1)
-&& (Group.isMember(c, rp.getGroupID())))
+&& (Group.isMember(c, e, rp.getGroupID())))
 {
 // group was set, and eperson is a member
 // of that group
@@ -319,6 +355,25 @@
 return false;
 }
 
+// check whether any bundle belongs to any item that passed submission
+// and workflow process
+protected static boolean isAnyItemInstalled(Context ctx, Bundle[] bundles)
+throws SQLException
+{
+for (Bundle bundle : bundles)
+{
+for (Item item : bundle.getItems())
+{
+if (WorkspaceItem.findByItem(ctx, item) == null
+&& WorkflowItem.findByItem(ctx, item) == null)
+{
+return true;
+}
+}
+}
+return false;
+}
+
 ///////////////////////////////////////////////
 // admin check methods
 ///////////////////////////////////////////////
@@ -480,7 +535,9 @@
 
 rp.update();
 
+c.turnOffAuthorisationSystem();
 o.updateLastModified();
+c.restoreAuthSystemState();
 }
 
 /**
@@ -535,7 +592,9 @@
 
 rp.update();
 
+c.turnOffAuthorisationSystem();
 o.updateLastModified();
+c.restoreAuthSystemState();
 }
 
 /**
@@ -799,7 +858,9 @@
 drp.update();
 }
 
+c.turnOffAuthorisationSystem();
 dest.updateLastModified();
+c.restoreAuthSystemState();
 }
 
 /**
@@ -815,12 +876,14 @@
 public static void removeAllPolicies(Context c, DSpaceObject o)
 throws SQLException
 {
-o.updateLastModified();
-
 // FIXME: authorization check?
 DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
 + "resource_type_id= ? AND resource_id= ? ",
 o.getType(), o.getID());
+
+c.turnOffAuthorisationSystem();
+o.updateLastModified();
+c.restoreAuthSystemState();
 }
 
 /**
@@ -837,7 +900,7 @@
 throws SQLException
 {
 DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
-+ "resource_type_id= ? AND resource_id= ? AND rptype <> ? ",
++ "resource_type_id= ? AND resource_id= ? AND (rptype <> ? OR rptype IS NULL)",
 o.getType(), o.getID(), type);
 }
 
@@ -862,6 +925,29 @@
 o.getType(), o.getID(), type);
 }
 
+/**
+* Change all the policies related to the action (fromPolicy) of the
+* specified object to the new action (toPolicy)
+*
+* @param context
+* @param dso
+* the dspace object
+* @param fromAction
+* the action to change
+* @param toAction
+* the new action to set
+* @throws SQLException
+* @throws AuthorizeException
+*/
+public static void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
+throws SQLException, AuthorizeException {
+List<ResourcePolicy> rps = getPoliciesActionFilter(context, dso, fromAction);
+for (ResourcePolicy rp : rps) {
+rp.setAction(toAction);
+rp.update();
+}
+}
+
 /**
 * Remove all policies from an object that match a given action. FIXME
 * doesn't check authorization
@@ -879,7 +965,6 @@
 public static void removePoliciesActionFilter(Context context,
 DSpaceObject dso, int actionID) throws SQLException
 {
-dso.updateLastModified();
 if (actionID == -1)
 {
 // remove all policies from object
@@ -891,6 +976,10 @@
 "resource_id= ? AND action_id= ? ",
 dso.getType(), dso.getID(), actionID);
 }
+
+context.turnOffAuthorisationSystem();
+dso.updateLastModified();
+context.restoreAuthSystemState();
 }
 
 /**
@@ -927,11 +1016,13 @@
 public static void removeGroupPolicies(Context c, DSpaceObject o, Group g)
 throws SQLException
 {
-o.updateLastModified();
-
 DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
 + "resource_type_id= ? AND resource_id= ? AND epersongroup_id= ? ",
 o.getType(), o.getID(), g.getID());
+
+c.turnOffAuthorisationSystem();
+o.updateLastModified();
+c.restoreAuthSystemState();
 }
 
 /**
@@ -950,10 +1041,13 @@
 public static void removeEPersonPolicies(Context c, DSpaceObject o, EPerson e)
 throws SQLException
 {
-o.updateLastModified();
 DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
 + "resource_type_id= ? AND resource_id= ? AND eperson_id= ? ",
 o.getType(), o.getID(), e.getID());
+
+c.turnOffAuthorisationSystem();
+o.updateLastModified();
+c.restoreAuthSystemState();
 }
 
 /**
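The new switchPoliciesAction() shown above rewrites every policy on an object from one action constant to another. A usage sketch under the DSpace 5.x API; the READ/WRITE pair and the hideAndRestore() wrapper are illustrative only:

```java
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;

public class SwitchPoliciesExample {
    // Temporarily retarget all READ policies on an object to another action,
    // then switch them back; any pair of action constants works the same way.
    static void hideAndRestore(Context context, DSpaceObject dso)
            throws java.sql.SQLException, AuthorizeException {
        AuthorizeManager.switchPoliciesAction(context, dso,
                Constants.READ, Constants.WRITE); // illustrative action pair
        // ... later ...
        AuthorizeManager.switchPoliciesAction(context, dso,
                Constants.WRITE, Constants.READ);
    }
}
```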
@@ -36,7 +36,7 @@ public class FixDefaultPolicies
 Context c = new Context();
 
 // turn off authorization
-c.setIgnoreAuthorization(true);
+c.turnOffAuthorisationSystem();
 
 //////////////////////
 // carnage begins here
@@ -63,7 +63,7 @@ public class PolicySet
 Context c = new Context();
 
 // turn off authorization
-c.setIgnoreAuthorization(true);
+c.turnOffAuthorisationSystem();
 
 //////////////////////
 // carnage begins here
@@ -354,7 +354,7 @@ public class BrowserScope
 */
 public void setResultsPerPage(int resultsPerPage)
 {
-if (resultsPerPage > -1 || browseIndex.isTagCloudEnabled())
+if (resultsPerPage > -1 || (browseIndex != null && browseIndex.isTagCloudEnabled()))
 {
 this.resultsPerPage = resultsPerPage;
 }
@@ -32,6 +32,8 @@ import org.dspace.utils.DSpace;
 *
 * @author Andrea Bollini (CILEA)
+* @author Adán Román Ruiz at arvo.es (bugfix)
+* @author Panagiotis Koutsourakis (National Documentation Centre) (bugfix)
+* @author Kostas Stamatis (National Documentation Centre) (bugfix)
 *
 */
 public class SolrBrowseDAO implements BrowseDAO
@@ -336,6 +338,22 @@ public class SolrBrowseDAO implements BrowseDAO
 addStatusFilter(query);
 query.setMaxResults(0);
 query.addFilterQueries("search.resourcetype:" + Constants.ITEM);
+
+// We need to take into account the fact that we may be in a subset of the items
+if (authority != null)
+{
+query.addFilterQueries("{!field f="+facetField + "_authority_filter}"
++ authority);
+}
+else if (this.value != null && !valuePartial)
+{
+query.addFilterQueries("{!field f="+facetField + "_value_filter}" + this.value);
+}
+else if (valuePartial)
+{
+query.addFilterQueries("{!field f="+facetField + "_partial}" + this.value);
+}
 
 if (isAscending)
 {
 query.setQuery("bi_"+column + "_sort" + ": [* TO \"" + value + "\"}");
@@ -343,6 +361,7 @@ public class SolrBrowseDAO implements BrowseDAO
 else
 {
 query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]");
+query.addFilterQueries("-(bi_" + column + "_sort" + ":" + value + "*)");
 }
 boolean includeUnDiscoverable = itemsWithdrawn || !itemsDiscoverable;
 DiscoverResult resp = null;
@@ -28,6 +28,7 @@ import org.dspace.workflow.WorkflowItem;
 import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
 import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
 
+import java.io.Serializable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.sql.PreparedStatement;
@@ -47,7 +48,6 @@ import java.util.*;
 * effect.
 *
 * @author Robert Tansley
-* @version $Revision$
 */
 public class Collection extends DSpaceObject
 {
@@ -294,31 +294,48 @@
 * @return the collections in the system
 * @throws SQLException
 */
-public static Collection[] findAll(Context context) throws SQLException {
+public static Collection[] findAll(Context context) throws SQLException
+{
 TableRowIterator tri = null;
-try {
-String query = "SELECT c.* FROM collection c " +
-"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
-if(DatabaseManager.isOracle()){
-query += " ORDER BY cast(m.text_value as varchar2(128))";
-}else{
-query += " ORDER BY m.text_value";
-}
-
-tri = DatabaseManager.query(context,
-query,
-Constants.COLLECTION,
-MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID()
-);
-} catch (SQLException e) {
-log.error("Find all Collections - ",e);
-throw e;
-}
-List<Collection> collections = new ArrayList<Collection>();
+List<Collection> collections = null;
+List<Serializable> params = new ArrayList<Serializable>();
+StringBuffer query = new StringBuffer(
+"SELECT c.*" +
+"FROM collection c " +
+"LEFT JOIN metadatavalue m ON (" +
+"m.resource_id = c.collection_id AND " +
+"m.resource_type_id = ? AND " +
+"m.metadata_field_id = ?" +
+")"
+);
+
+if (DatabaseManager.isOracle())
+{
+query.append(" ORDER BY cast(m.text_value as varchar2(128))");
+}
+else
+{
+query.append(" ORDER BY m.text_value");
+}
+
+params.add(Constants.COLLECTION);
+params.add(
+MetadataField.findByElement(
+context,
+MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
+"title",
+null
+).getFieldID()
+);
+
+try
+{
+tri = DatabaseManager.query(
+context, query.toString(), params.toArray()
+);
+
+collections = new ArrayList<Collection>();
+
 while (tri.hasNext())
 {
 TableRow row = tri.next();
@@ -337,6 +354,11 @@
 }
 }
 }
+catch (SQLException e)
+{
+log.error("Find all Collections - ", e);
+throw e;
+}
 finally
 {
 // close the TableRowIterator to free up resources
@@ -363,31 +385,47 @@
 public static Collection[] findAll(Context context, Integer limit, Integer offset) throws SQLException
 {
 TableRowIterator tri = null;
-try{
-String query = "SELECT c.* FROM collection c " +
-"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
-
-if(DatabaseManager.isOracle()){
-query += " ORDER BY cast(m.text_value as varchar2(128))";
-}else{
-query += " ORDER BY m.text_value";
-}
-query += " limit ? offset ?";
-tri = DatabaseManager.query(context,
-query,
-Constants.COLLECTION,
-MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID(),
-limit,
-offset
-);
-} catch (SQLException e) {
-log.error("Find all Collections offset/limit - ",e);
-throw e;
-}
-List<Collection> collections = new ArrayList<Collection>();
+List<Collection> collections = null;
+List<Serializable> params = new ArrayList<Serializable>();
+StringBuffer query = new StringBuffer(
+"SELECT c.*" +
+"FROM collection c " +
+"LEFT JOIN metadatavalue m ON (" +
+"m.resource_id = c.collection_id AND " +
+"m.resource_type_id = ? AND " +
+"m.metadata_field_id = ?" +
+")"
+);
+
+if (DatabaseManager.isOracle())
+{
+query.append(" ORDER BY cast(m.text_value as varchar2(128))");
+}
+else
+{
+query.append(" ORDER BY m.text_value");
+}
+
+params.add(Constants.COLLECTION);
+params.add(
+MetadataField.findByElement(
+context,
+MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
+"title",
+null
+).getFieldID()
+);
+
+DatabaseManager.applyOffsetAndLimit(query, params, offset, limit);
+
+try
+{
+tri = DatabaseManager.query(
+context, query.toString(), params.toArray()
+);
+
+collections = new ArrayList<Collection>();
+
 while (tri.hasNext())
 {
 TableRow row = tri.next();
@@ -406,6 +444,11 @@
 }
 }
 }
+catch (SQLException e)
+{
+log.error("Find all Collections offset/limit - ", e);
+throw e;
+}
 finally
 {
 // close the TableRowIterator to free up resources
@@ -450,13 +493,20 @@ public class Collection extends DSpaceObject
 */
public ItemIterator getItems(Integer limit, Integer offset) throws SQLException
{
    String myQuery = "SELECT item.* FROM item, collection2item WHERE "
        + "item.item_id=collection2item.item_id AND "
        + "collection2item.collection_id= ? "
        + "AND item.in_archive='1' limit ? offset ?";
    List<Serializable> params = new ArrayList<Serializable>();
    StringBuffer myQuery = new StringBuffer(
        "SELECT item.* " +
        "FROM item, collection2item " +
        "WHERE item.item_id = collection2item.item_id " +
        "AND collection2item.collection_id = ? " +
        "AND item.in_archive = '1'"
    );

    TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item",
        myQuery, getID(), limit, offset);
    params.add(getID());
    DatabaseManager.applyOffsetAndLimit(myQuery, params, offset, limit);

    TableRowIterator rows = DatabaseManager.query(ourContext,
        myQuery.toString(), params.toArray());

    return new ItemIterator(ourContext, rows);
}
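The hunks above replace hand-built "limit ? offset ?" suffixes with DatabaseManager.applyOffsetAndLimit(query, params, offset, limit), so the dialect-specific paging clause (LIMIT/OFFSET on PostgreSQL, a ROWNUM-style construct on Oracle) is generated in one place. A minimal caller sketch, assuming a valid Context and a method that declares SQLException; page size and printing are illustrative:

// Sketch: fetch the second page of 20 collections via the refactored findAll.
Context context = new Context();
try
{
    Collection[] page = Collection.findAll(context, 20, 20); // limit 20, offset 20
    for (Collection col : page)
    {
        System.out.println(col.getID() + ": " + col.getName());
    }
}
finally
{
    context.abort(); // read-only usage; discard the context
}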
@@ -679,8 +729,6 @@ public class Collection extends DSpaceObject
        g.setName("COLLECTION_" + getID() + "_WORKFLOW_STEP_" + step);
        g.update();
        setWorkflowGroup(step, g);

        AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, g);
    }

    return workflowGroup[step - 1];
@@ -689,26 +737,82 @@ public class Collection extends DSpaceObject
/**
 * Set the workflow group corresponding to a particular workflow step.
 * <code>null</code> can be passed in if there should be no associated
 * group for that workflow step; any existing group is NOT deleted.
 * group for that workflow step. Any existing group is NOT deleted.
 *
 * @param step
 *            the workflow step (1-3)
 * @param g
 * @param newGroup
 *            the new workflow group, or <code>null</code>
 * @throws java.sql.SQLException passed through.
 * @throws org.dspace.authorize.AuthorizeException passed through.
 */
public void setWorkflowGroup(int step, Group g)
public void setWorkflowGroup(int step, Group newGroup)
    throws SQLException, AuthorizeException
{
    workflowGroup[step - 1] = g;

    if (g == null)
    Group oldGroup = getWorkflowGroup(step);
    String stepColumn;
    int action;
    switch(step)
    {
        collectionRow.setColumnNull("workflow_step_" + step);
    case 1:
        action = Constants.WORKFLOW_STEP_1;
        stepColumn = "workflow_step_1";
        break;
    case 2:
        action = Constants.WORKFLOW_STEP_2;
        stepColumn = "workflow_step_2";
        break;
    case 3:
        action = Constants.WORKFLOW_STEP_3;
        stepColumn = "workflow_step_3";
        break;
    default:
        throw new IllegalArgumentException("Illegal step count: " + step);
    }
    workflowGroup[step-1] = newGroup;
    if (newGroup != null)
        collectionRow.setColumn(stepColumn, newGroup.getID());
    else
    {
        collectionRow.setColumn("workflow_step_" + step, g.getID());
    }
        collectionRow.setColumnNull(stepColumn);
    modified = true;

    // Deal with permissions.
    try {
        ourContext.turnOffAuthorisationSystem();
        // remove the policies for the old group
        if (oldGroup != null)
        {
            List<ResourcePolicy> oldPolicies = AuthorizeManager
                .getPoliciesActionFilter(ourContext, this, action);
            int oldGroupID = oldGroup.getID();
            for (ResourcePolicy rp : oldPolicies)
            {
                if (rp.getGroupID() == oldGroupID)
                    rp.delete();
            }

            oldPolicies = AuthorizeManager
                .getPoliciesActionFilter(ourContext, this, Constants.ADD);
            for (ResourcePolicy rp : oldPolicies)
            {
                if ((rp.getGroupID() == oldGroupID)
                        && ResourcePolicy.TYPE_WORKFLOW.equals(rp.getRpType()))
                    rp.delete();
            }
        }

        // New group can be null to delete workflow step.
        // We need to grant permissions if new group is not null.
        if (newGroup != null)
        {
            AuthorizeManager.addPolicy(ourContext, this, action, newGroup,
                    ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, newGroup,
                    ResourcePolicy.TYPE_WORKFLOW);
        }
    } finally {
        ourContext.restoreAuthSystemState();
    }
}
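setWorkflowGroup() now owns the policy bookkeeping for a step: it deletes the old group's WORKFLOW-typed policies and grants the step action plus ADD to the new group, with the authorisation system temporarily off. A hedged sketch of the new contract; the method and group names are illustrative:

// Sketch: attach, persist, then detach a step-2 reviewers group.
void demoWorkflowGroup(Collection collection, Group reviewers)
        throws SQLException, IOException, AuthorizeException
{
    collection.setWorkflowGroup(2, reviewers); // grants WORKFLOW_STEP_2 and ADD to reviewers
    collection.update();                       // persist the changed workflow_step_2 column

    collection.setWorkflowGroup(2, null);      // clears the column and the group's policies
    collection.update();
}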
/**
@@ -1513,7 +1617,7 @@ public class Collection extends DSpaceObject

public static Collection[] findAuthorizedOptimized(Context context, int actionID) throws java.sql.SQLException
{
    if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", true)) {
    if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false)) {
        // Fallback to legacy query if config says so. The rationale could be that a site found a bug.
        return findAuthorized(context, null, actionID);
    }
@@ -280,7 +280,7 @@ public class Community extends DSpaceObject
{
    while (tri.hasNext())
    {
        TableRow row = tri.next();
        TableRow row = tri.next(context);

        // First check the cache
        Community fromCache = (Community) context.fromCache(
@@ -350,7 +350,7 @@ public class Community extends DSpaceObject
{
    while (tri.hasNext())
    {
        TableRow row = tri.next();
        TableRow row = tri.next(context);

        // First check the cache
        Community fromCache = (Community) context.fromCache(
@@ -683,7 +683,7 @@ public class Community extends DSpaceObject
{
    while (tri.hasNext())
    {
        TableRow row = tri.next();
        TableRow row = tri.next(ourContext);

        // First check the cache
        Collection fromCache = (Collection) ourContext.fromCache(
@@ -757,7 +757,7 @@ public class Community extends DSpaceObject
{
    while (tri.hasNext())
    {
        TableRow row = tri.next();
        TableRow row = tri.next(ourContext);

        // First check the cache
        Community fromCache = (Community) ourContext.fromCache(
@@ -812,7 +812,7 @@ public class Community extends DSpaceObject
{
    if (tri.hasNext())
    {
        TableRow row = tri.next();
        TableRow row = tri.next(ourContext);

        // First check the cache
        Community fromCache = (Community) ourContext.fromCache(
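Each Community hunk above swaps tri.next() for tri.next(context) or tri.next(ourContext); the extra argument appears to let the 5.x RDBMS layer tie each fetched TableRow to the calling Context, which is why the cache check follows immediately. The recurring pattern, sketched with illustrative names:

// Sketch of the shared iteration pattern after the change.
while (tri.hasNext())
{
    TableRow row = tri.next(context); // was tri.next(); now tied to the Context

    // First check the cache before materialising a new object
    Community fromCache = (Community) context.fromCache(
            Community.class, row.getIntColumn("community_id"));
    // ... fall back to constructing a Community from 'row' when not cached ...
}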
@@ -60,6 +60,8 @@ public class InstallItem
        IOException, AuthorizeException
{
    Item item = is.getItem();
    Collection collection = is.getCollection();

    IdentifierService identifierService = new DSpace().getSingletonService(IdentifierService.class);
    try {
        if(suppliedHandle == null)
@@ -74,7 +76,15 @@ public class InstallItem

    populateMetadata(c, item);

    return finishItem(c, item, is);
    // Finish up / archive the item
    item = finishItem(c, item, is);

    // As this is a BRAND NEW item, as a final step we need to remove the
    // submitter item policies created during deposit and replace them with
    // the default policies from the collection.
    item.inheritCollectionDefaultPolicies(collection);

    return item;
}

/**
@@ -205,8 +215,18 @@ public class InstallItem
    item.addDC("description", "provenance", "en", provDescription);
}

// final housekeeping when adding new Item to archive
// common between installing and "restoring" items.
/**
 * Final housekeeping when adding a new Item into the archive.
 * This method is used by *both* installItem() and restoreItem(),
 * so all actions here will be run for a newly added item or a restored item.
 *
 * @param c DSpace Context
 * @param item Item in question
 * @param is InProgressSubmission object
 * @return final "archived" Item
 * @throws SQLException if database error
 * @throws AuthorizeException if authorization error
 */
private static Item finishItem(Context c, Item item, InProgressSubmission is)
    throws SQLException, IOException, AuthorizeException
{
@@ -229,10 +249,6 @@ public class InstallItem
    // remove in-progress submission
    is.deleteWrapper();

    // remove the item's policies and replace them with
    // the defaults from the collection
    item.inheritCollectionDefaultPolicies(is.getCollection());

    // set embargo lift date and take away read access if indicated.
    EmbargoManager.setEmbargo(c, item);
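The InstallItem hunks move inheritCollectionDefaultPolicies() out of finishItem() and into installItem() alone, so a restore (which also runs finishItem()) no longer overwrites policies carried in an AIP, while a brand-new deposit still swaps its submitter policies for the collection defaults. A hedged sketch of the install path; the wrapper method is illustrative:

// Sketch: installing a finished submission under the new flow.
Item installDemo(Context context, WorkspaceItem wsi)
        throws SQLException, IOException, AuthorizeException
{
    // finishItem() now leaves policies alone; installItem() inherits the
    // collection defaults as its final step, for brand-new items only.
    return InstallItem.installItem(context, wsi);
}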
@@ -24,14 +24,15 @@ import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.browse.BrowseException;
import org.dspace.browse.IndexBrowse;
import org.dspace.content.authority.ChoiceAuthorityManager;
import org.dspace.content.authority.Choices;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.content.authority.Choices;
import org.dspace.content.authority.ChoiceAuthorityManager;
import org.dspace.event.Event;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.event.Event;
import org.dspace.handle.HandleManager;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.IdentifierService;
@@ -40,6 +41,8 @@ import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.utils.DSpace;
import org.dspace.versioning.VersioningService;
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;

/**
 * Class representing an item in DSpace.
@@ -263,7 +266,7 @@ public class Item extends DSpaceObject
}

String query = "SELECT item.* FROM metadatavalue,item WHERE item.in_archive='1' " +
    "AND item.item_id = metadatavalue.item_id AND metadata_field_id = ?";
    "AND item.item_id = metadatavalue.resource_id AND metadatavalue.resource_type_id=2 AND metadata_field_id = ?";
TableRowIterator rows = null;
if (Item.ANY.equals(authority)) {
    rows = DatabaseManager.queryTable(context, "item", query, mdf.getFieldID());
@@ -1158,8 +1161,14 @@ public class Item extends DSpaceObject
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
        "WITHDRAW", getIdentifiers(ourContext)));

// remove all authorization policies, saving the custom ones
AuthorizeManager.removeAllPoliciesByDSOAndTypeNotEqualsTo(ourContext, this, ResourcePolicy.TYPE_CUSTOM);
// switch all READ authorization policies to WITHDRAWN_READ
AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.READ, Constants.WITHDRAWN_READ);
for (Bundle bnd : this.getBundles()) {
    AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.READ, Constants.WITHDRAWN_READ);
    for (Bitstream bs : bnd.getBitstreams()) {
        AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.READ, Constants.WITHDRAWN_READ);
    }
}

// Write log
log.info(LogManager.getHeader(ourContext, "withdraw_item", "user="
@@ -1217,14 +1226,26 @@ public class Item extends DSpaceObject
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
        "REINSTATE", getIdentifiers(ourContext)));

// restore all WITHDRAWN_READ authorization policies back to READ
for (Bundle bnd : this.getBundles()) {
    AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.WITHDRAWN_READ, Constants.READ);
    for (Bitstream bs : bnd.getBitstreams()) {
        AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.WITHDRAWN_READ, Constants.READ);
    }
}

// check if the item was withdrawn before the fix DS-3097
if (AuthorizeManager.getPoliciesActionFilter(ourContext, this, Constants.WITHDRAWN_READ).size() != 0) {
    AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.WITHDRAWN_READ, Constants.READ);
}
else {
    // authorization policies
    if (colls.length > 0)
    {
        // FIXME: not multiple inclusion friendly - just apply access
        // policies from first collection
        // remove the item's policies and replace them with
        // the defaults from the collection
        inheritCollectionDefaultPolicies(colls[0]);
        adjustItemPolicies(getOwningCollection());
    }
}

// Write log
@@ -1750,8 +1771,13 @@ public class Item extends DSpaceObject
// is this collection not yet created, and an item template is created
if (getOwningCollection() == null)
{
    if (!isInProgressSubmission()) {
        return true;
    }
    else {
        return false;
    }
}

// is this person a COLLECTION_EDITOR for the owning collection?
if (getOwningCollection().canEditBoolean(false))
@@ -1762,6 +1788,20 @@ public class Item extends DSpaceObject
    return false;
}

/**
 * Check if the item is an in-progress submission
 * @return <code>true</code> if the item is an in-progress submission, i.e. a WorkspaceItem or WorkflowItem
 * @throws SQLException
 */
public boolean isInProgressSubmission() throws SQLException {
    return WorkspaceItem.findByItem(ourContext, this) != null ||
            ((ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow")
                    && XmlWorkflowItem.findByItem(ourContext, this) != null)
                || WorkflowItem.findByItem(ourContext, this) != null);
}

public String getName()
{
    return getMetadataFirstValue(MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
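withdraw() now preserves policies instead of deleting them: custom policies are kept, and every READ on the item, its bundles and bitstreams is switched to WITHDRAWN_READ; reinstate() switches them back, with an extra check for items withdrawn before DS-3097 was fixed. A hedged round-trip sketch:

// Sketch: a withdraw/reinstate cycle round-trips READ policies.
void demoWithdrawCycle(Item item)
        throws SQLException, IOException, AuthorizeException
{
    item.withdraw();  // READ -> WITHDRAWN_READ on item, bundles and bitstreams
    item.reinstate(); // WITHDRAWN_READ -> READ (collection defaults if withdrawn pre-DS-3097)
}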
@@ -81,6 +81,7 @@ public class WorkspaceItem implements InProgressSubmission
 *            ID of the workspace item
 *
 * @return the workspace item, or null if the ID is invalid.
 * @throws java.sql.SQLException passed through.
 */
public static WorkspaceItem find(Context context, int id)
    throws SQLException
@@ -131,6 +132,9 @@ public class WorkspaceItem implements InProgressSubmission
 *            of the collection's template item
 *
 * @return the newly created workspace item
 * @throws org.dspace.authorize.AuthorizeException passed through.
 * @throws java.sql.SQLException passed through.
 * @throws java.io.IOException passed through.
 */
public static WorkspaceItem create(Context c, Collection coll,
        boolean template) throws AuthorizeException, SQLException,
@@ -140,98 +144,49 @@ public class WorkspaceItem implements InProgressSubmission
    AuthorizeManager.authorizeAction(c, coll, Constants.ADD);

    // Create an item
    Item i = Item.create(c);
    i.setSubmitter(c.getCurrentUser());
    Item item = Item.create(c);
    item.setSubmitter(c.getCurrentUser());

    // Now create the policies for the submitter and workflow
    // users to modify item and contents
    // Now create the policies for the submitter to modify item and contents.
    // contents = bitstreams, bundles
    // FIXME: icky hardcoded workflow steps
    Group step1group = coll.getWorkflowGroup(1);
    Group step2group = coll.getWorkflowGroup(2);
    Group step3group = coll.getWorkflowGroup(3);

    EPerson e = c.getCurrentUser();
    EPerson submitter = c.getCurrentUser();

    // read permission
    AuthorizeManager.addPolicy(c, i, Constants.READ, e, ResourcePolicy.TYPE_SUBMISSION);
    // Add policies for the submitter
    AuthorizeManager.addPolicy(c, item, Constants.READ, submitter, ResourcePolicy.TYPE_SUBMISSION);
    AuthorizeManager.addPolicy(c, item, Constants.WRITE, submitter, ResourcePolicy.TYPE_SUBMISSION);
    AuthorizeManager.addPolicy(c, item, Constants.ADD, submitter, ResourcePolicy.TYPE_SUBMISSION);
    AuthorizeManager.addPolicy(c, item, Constants.REMOVE, submitter, ResourcePolicy.TYPE_SUBMISSION);


    if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
    if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow"))
    {
        // Add policies for the workflow step administrative groups
        if (step1group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.READ, step1group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.READ, step1group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.WRITE, step1group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.ADD, step1group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step1group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step2group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.READ, step2group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.READ, step2group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.WRITE, step2group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.ADD, step2group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step2group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step3group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.READ, step3group, ResourcePolicy.TYPE_WORKFLOW);
        }
    }


    // write permission
    AuthorizeManager.addPolicy(c, i, Constants.WRITE, e, ResourcePolicy.TYPE_SUBMISSION);

    if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
        if (step1group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.WRITE, step1group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step2group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.WRITE, step2group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step3group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.WRITE, step3group, ResourcePolicy.TYPE_WORKFLOW);
        }
    }

    // add permission
    AuthorizeManager.addPolicy(c, i, Constants.ADD, e, ResourcePolicy.TYPE_SUBMISSION);

    if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
        if (step1group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.ADD, step1group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step2group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.ADD, step2group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step3group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.ADD, step3group, ResourcePolicy.TYPE_WORKFLOW);
        }
    }

    // remove contents permission
    AuthorizeManager.addPolicy(c, i, Constants.REMOVE, e, ResourcePolicy.TYPE_SUBMISSION);

    if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
        if (step1group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step1group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step2group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step2group, ResourcePolicy.TYPE_WORKFLOW);
        }

        if (step3group != null)
        {
            AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step3group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.READ, step3group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.WRITE, step3group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.ADD, step3group, ResourcePolicy.TYPE_WORKFLOW);
            AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step3group, ResourcePolicy.TYPE_WORKFLOW);
        }
    }

@@ -244,22 +199,22 @@ public class WorkspaceItem implements InProgressSubmission

    for (int n = 0; n < md.length; n++)
    {
        i.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
        item.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
                md[n].value);
    }
}

i.update();
item.update();

// Create the workspace item row
TableRow row = DatabaseManager.row("workspaceitem");

row.setColumn("item_id", i.getID());
row.setColumn("item_id", item.getID());
row.setColumn("collection_id", coll.getID());

log.info(LogManager.getHeader(c, "create_workspace_item",
        "workspace_item_id=" + row.getIntColumn("workspace_item_id")
        + "item_id=" + i.getID() + "collection_id="
        + "item_id=" + item.getID() + "collection_id="
        + coll.getID()));

DatabaseManager.insert(c, row);
@@ -280,6 +235,7 @@ public class WorkspaceItem implements InProgressSubmission
 *            the eperson
 *
 * @return the corresponding workspace items
 * @throws java.sql.SQLException passed through.
 */
public static WorkspaceItem[] findByEPerson(Context context, EPerson ep)
    throws SQLException
@@ -332,6 +288,7 @@ public class WorkspaceItem implements InProgressSubmission
 *            the collection
 *
 * @return the corresponding workspace items
 * @throws java.sql.SQLException passed through.
 */
public static WorkspaceItem[] findByCollection(Context context, Collection c)
    throws SQLException
@@ -384,6 +341,7 @@ public class WorkspaceItem implements InProgressSubmission
 *            the item
 *
 * @return workflow item corresponding to the item, or null
 * @throws java.sql.SQLException passed through.
 */
public static WorkspaceItem findByItem(Context context, Item i)
    throws SQLException
@@ -408,6 +366,7 @@ public class WorkspaceItem implements InProgressSubmission
 * @param context the context object
 *
 * @return all workspace items
 * @throws java.sql.SQLException passed through.
 */
public static WorkspaceItem[] findAll(Context context)
    throws SQLException
@@ -505,6 +464,8 @@ public class WorkspaceItem implements InProgressSubmission

/**
 * Update the workspace item, including the unarchived item.
 * @throws java.sql.SQLException passed through.
 * @throws org.dspace.authorize.AuthorizeException passed through.
 */
public void update() throws SQLException, AuthorizeException
{
@@ -554,6 +515,10 @@ public class WorkspaceItem implements InProgressSubmission
 * Delete the workspace item. The entry in workspaceitem, the unarchived
 * item and its contents are all removed (multiple inclusion
 * notwithstanding.)
 * @throws java.sql.SQLException passed through.
 * @throws org.dspace.authorize.AuthorizeException
 *            if not original submitter or an administrator.
 * @throws java.io.IOException passed through.
 */
public void deleteAll() throws SQLException, AuthorizeException,
        IOException
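The create() rewrite above collapses four near-identical grant blocks (READ, WRITE, ADD, REMOVE, each repeated per workflow step group) into one grant pass per group. The same consolidation expressed as a hypothetical helper; the method name is illustrative:

// Sketch: one loop grants the four actions a step group needs.
private static void grantWorkflowActions(Context c, Item item, Group group)
        throws SQLException, AuthorizeException
{
    if (group == null)
    {
        return; // collection has no group configured for this step
    }
    int[] actions = { Constants.READ, Constants.WRITE, Constants.ADD, Constants.REMOVE };
    for (int action : actions)
    {
        AuthorizeManager.addPolicy(c, item, action, group, ResourcePolicy.TYPE_WORKFLOW);
    }
}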
@@ -0,0 +1,44 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.content.crosswalk;

import org.dspace.core.Context;

import java.sql.SQLException;

/**
 * Created by jonas - jonas@atmire.com on 21/04/17.
 * Implementation of the {@link DisseminationCrosswalk} interface that allows a Context to be set manually.
 */
public abstract class ContextAwareDisseminationCrosswalk implements DisseminationCrosswalk {

    private Context context;
    private boolean contextCreatedInternally = false;

    public void setContext(Context context) {
        this.context = context;
    }

    public Context getContext() throws SQLException {
        if (context == null || !context.isValid()) {
            context = new Context();
            contextCreatedInternally = true;
        }
        return context;
    }

    public void handleContextCleanup() throws SQLException {
        if (contextCreatedInternally) {
            context.complete();
        } else {
            context.commit();
        }
    }

}
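A hedged usage sketch for the new base class: a caller that already owns a Context shares it with the crosswalk, and handleContextCleanup() then commits rather than completes, since the context was not created internally. The wrapper method is illustrative and assumes an implementation that does not clean up on its own:

// Sketch: share a caller-owned Context with a context-aware crosswalk.
Element disseminateShared(ContextAwareDisseminationCrosswalk xwalk,
        Context callerContext, DSpaceObject dso)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
{
    xwalk.setContext(callerContext);   // getContext() will now reuse this context
    Element result = xwalk.disseminateElement(dso);
    xwalk.handleContextCleanup();      // commit(), because the context came from outside
    return result;
}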
@@ -27,7 +27,10 @@ import org.dspace.license.CreativeCommons;
 *
 * @author Larry Stone
 * @version $Revision: 1.0 $
 *
 * @deprecated To make the JSPUI and XMLUI approaches uniform, the bitstream holding the license in textual format is no longer stored; see https://jira.duraspace.org/browse/DS-2604
 */
@Deprecated
public class CreativeCommonsTextStreamDisseminationCrosswalk
    implements StreamDisseminationCrosswalk
{
@@ -7,15 +7,15 @@
 */
package org.dspace.content.crosswalk;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.jdom.Element;
import org.jdom.Namespace;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

/**
 * Dissemination Crosswalk plugin -- translate DSpace native
 * metadata into an external XML format.
@@ -107,6 +107,9 @@ public interface DisseminationCrosswalk
 * Execute crosswalk, returning one XML root element as
 * a JDOM <code>Element</code> object.
 * This is typically the root element of a document.
 * Note that, if the implementing class is of type {@link org.dspace.content.crosswalk.ContextAwareDisseminationCrosswalk}
 * and a Context is at hand, you should set it via {@link org.dspace.content.crosswalk.ContextAwareDisseminationCrosswalk#setContext(org.dspace.core.Context)} before calling this method.
 * The implementing class should then use {@link ContextAwareDisseminationCrosswalk#getContext()} and {@link ContextAwareDisseminationCrosswalk#handleContextCleanup()} to retrieve the context and to commit or complete it, respectively.
 * <p>
 *
 * @param dso the DSpace Object whose metadata to export.
@@ -7,17 +7,6 @@
 */
package org.dspace.content.crosswalk;

import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Date;

import java.text.SimpleDateFormat;
import java.util.logging.Level;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
@@ -33,6 +22,12 @@ import org.dspace.eperson.Group;
import org.jdom.Element;
import org.jdom.Namespace;

import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * METSRights Ingestion & Dissemination Crosswalk
 * <p>
@@ -51,7 +46,7 @@ import org.jdom.Namespace;
 * @author Tim Donohue
 * @version $Revision: 2108 $
 */
public class METSRightsCrosswalk
public class METSRightsCrosswalk extends ContextAwareDisseminationCrosswalk
    implements IngestionCrosswalk, DisseminationCrosswalk
{
    /** log4j category */
@@ -120,14 +115,14 @@ public class METSRightsCrosswalk
 * METSRights PermissionTypes.
 *
 * @param dso DSpace Object
 * @param context Context Object
 * @return XML Element corresponding to the new <RightsDeclarationMD> translation
 * @throws CrosswalkException
 * @throws IOException
 * @throws SQLException
 * @throws AuthorizeException
 */
@Override
public Element disseminateElement(DSpaceObject dso)
public Element disseminateElement(Context context, DSpaceObject dso)
    throws CrosswalkException,
        IOException, SQLException, AuthorizeException
{
@@ -156,7 +151,6 @@ public class METSRightsCrosswalk
    // what those rights are -- too many types of content can be stored in DSpace

    //Get all policies on this DSpace Object
    Context context = new Context();
    List<ResourcePolicy> policies = AuthorizeManager.getPolicies(context, dso);

    //For each DSpace policy
@@ -282,9 +276,31 @@ public class METSRightsCrosswalk

    }//end for each policy

    context.complete();
    return rightsMD;
}
/**
 * Actually Disseminate into METSRights schema. This method locates all DSpace
 * policies (permissions) for the provided object, and translates them into
 * METSRights PermissionTypes.
 *
 * @param dso DSpace Object
 * @return XML Element corresponding to the new <RightsDeclarationMD> translation
 * @throws CrosswalkException
 * @throws IOException
 * @throws SQLException
 * @throws AuthorizeException
 * @deprecated Do not use this method; opt for {@link #disseminateElement(Context context, DSpaceObject dso)} instead, as it does not need to create a new Context internally
 */
@Override
@Deprecated
public Element disseminateElement(DSpaceObject dso)
    throws CrosswalkException,
        IOException, SQLException, AuthorizeException {
    Context context = getContext();
    Element element = disseminateElement(context, dso);
    handleContextCleanup();
    return element;
}
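For new code, the two-argument overload above is the one to call; the crosswalk then never constructs or completes a Context of its own. A short sketch:

// Sketch: preferred call path after the change.
Element rightsFor(METSRightsCrosswalk xwalk, Context context, DSpaceObject dso)
        throws CrosswalkException, IOException, SQLException, AuthorizeException
{
    return xwalk.disseminateElement(context, dso); // caller keeps control of the Context
}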
@Override
public List<Element> disseminateList(DSpaceObject dso)
@@ -435,52 +451,42 @@ public class METSRightsCrosswalk
public void ingest(Context context, DSpaceObject dso, List<Element> ml)
    throws CrosswalkException, IOException, SQLException, AuthorizeException
{
    // we cannot crosswalk METSRights to a SITE object
    // SITE objects are not supported by the METSRightsCrosswalk
    if (dso.getType() == Constants.SITE)
    {
        throw new CrosswalkObjectNotSupported("Wrong target object type, METSRightsCrosswalk cannot crosswalk a SITE object.");
    }

    //First, clear all existing Policies on this DSpace Object
    // as we don't want them to conflict with policies we will be adding
    if(!ml.isEmpty())
    // If we're fed the top-level <RightsDeclarationMD> wrapper element, recurse into its guts.
    // What we need to analyze are the <Context> elements underneath it.
    if(!ml.isEmpty() && ml.get(0).getName().equals("RightsDeclarationMD"))
    {
        AuthorizeManager.removeAllPolicies(context, dso);
        ingest(context, dso, ml.get(0).getChildren());
    }

    // Loop through each Element in the List
    List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
    else
    {
        // Loop through each <Context> Element in the passed in List, creating a ResourcePolicy for each
        List<ResourcePolicy> policies = new ArrayList<>();
        for (Element element : ml)
        {
            // if we're fed a <RightsDeclarationMD> wrapper object, recurse on its guts:
            if (element.getName().equals("RightsDeclarationMD"))
            {
                ingest(context, dso, element.getChildren());
            }
            // "Context" section (where permissions are stored)
            else if (element.getName().equals("Context"))
            // Must be a "Context" section (where permissions are stored)
            if (element.getName().equals("Context"))
            {
                //get what class of context this is
                String contextClass = element.getAttributeValue("CONTEXTCLASS");

                if ((element.getAttributeValue("start-date") != null)
                        || (element.getAttributeValue("end-date") != null)
                        || (element.getAttributeValue("rpName") != null))
                {
                    SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
                    try {
                        ResourcePolicy rp = ResourcePolicy.create(context);
                        if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("GENERAL PUBLIC")) {
                            Group anonGroup = Group.find(context, 0);
                            rp.setGroup(anonGroup);
                        }
                        else
                        {
                            if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("REPOSITORY MGR")) {
                                Group adminGroup = Group.find(context, 1);
                                rp.setGroup(adminGroup);
                            }
                SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );

                // get reference to the <Permissions> element
                // Note: we are assuming here that there will only ever be ONE <Permissions>
                // element. Currently there are no known use cases for multiple.
                Element permsElement = element.getChild("Permissions", METSRights_NS);
                if(permsElement == null) {
                    log.error("No <Permissions> element was found. Skipping this <Context> element.");
                    continue;
                }

                if (element.getAttributeValue("rpName") != null)
                {
                    rp.setRpName(element.getAttributeValue("rpName"));
@@ -495,74 +501,31 @@ public class METSRightsCrosswalk
                    rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
                }
                }catch (ParseException ex) {
                    java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
                    log.error("Failed to parse embargo date. The date needs to be in the format 'yyyy-MM-dd'.", ex);
                }

                List<Element> le = new ArrayList<Element>(element.getChildren());
                for (Element el : le)
                {
                    if ((el.getAttributeValue("DISCOVER").equalsIgnoreCase("true"))
                            && (el.getAttributeValue("DISPLAY").equalsIgnoreCase("true")))
                    {
                        if (el.getAttributeValue("DELETE").equalsIgnoreCase("false"))
                        {
                            if (el.getAttributeValue("MODIFY").equalsIgnoreCase("false"))
                            {
                                rp.setAction(Constants.READ);
                            }
                            else
                            {
                                rp.setAction(Constants.WRITE);
                            }
                        }
                        else
                        {
                            if (el.getAttributeValue("MODIFY").equalsIgnoreCase("true"))
                            {
                                rp.setAction(Constants.DELETE);
                                if ((el.getAttributeValue("COPY").equalsIgnoreCase("true"))
                                        &&(el.getAttributeValue("DUPLICATE").equalsIgnoreCase("true"))
                                        &&(el.getAttributeValue("PRINT").equalsIgnoreCase("true")))
                                {
                                    rp.setAction(Constants.ADMIN);
                                }
                            }
                        }
                    }
                }
                policies.add(rp);
                } catch (NullPointerException ex) {
                    java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
                }
                assignPermissions(context, dso, policies);
                }
                else
                {
                //also get reference to the <Permissions> element
                Element permsElement = element.getChild("Permissions", METSRights_NS);

                //Check if this permission pertains to Anonymous users
                if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
                {
                    //get DSpace Anonymous group, ID=0
                    Group anonGroup = Group.find(context, 0);
                    Group anonGroup = Group.find(context, Group.ANONYMOUS_ID);
                    if(anonGroup==null)
                    {
                        throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
                    }

                    assignPermissions(context, dso, anonGroup, permsElement);
                    rp.setGroup(anonGroup);
                } // else if this permission declaration pertains to Administrators
                else if(ADMIN_CONTEXTCLASS.equals(contextClass))
                {
                    //get DSpace Administrator group, ID=1
                    Group adminGroup = Group.find(context, 1);
                    Group adminGroup = Group.find(context, Group.ADMIN_ID);
                    if(adminGroup==null)
                    {
                        throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
                    }

                    assignPermissions(context, dso, adminGroup, permsElement);
                    rp.setGroup(adminGroup);
                } // else if this permission pertains to another DSpace group
                else if(GROUP_CONTEXTCLASS.equals(contextClass))
                {
@@ -591,8 +554,8 @@ public class METSRightsCrosswalk
                        + "Please restore this group using the SITE AIP, or recreate it.");
                    }

                    //assign permissions to group on this object
                    assignPermissions(context, dso, group, permsElement);
                    //assign group to policy
                    rp.setGroup(group);
                }
                catch(PackageException pe)
                {
@@ -600,7 +563,7 @@ public class METSRightsCrosswalk
                    //We'll just wrap it as a CrosswalkException and throw it upwards
                    throw new CrosswalkException(pe);
                }
                }//end if Group
                }// else if this permission pertains to a DSpace person
                else if(PERSON_CONTEXTCLASS.equals(contextClass))
                {
                    //we need to find the person it pertains to
@@ -629,89 +592,26 @@ public class METSRightsCrosswalk
                    + "Please restore this Person object using the SITE AIP, or recreate it.");
                    }

                    //assign permissions to person on this object
                    assignPermissions(context, dso, person, permsElement);
                    //assign person to the policy
                    rp.setEPerson(person);
                }//end if Person
                else
                else {
                    log.error("Unrecognized CONTEXTCLASS: " + contextClass);
                }

                //set permissions on policy and add to list of policies
                rp.setAction(parsePermissions(permsElement));
                policies.add(rp);
            } //end if "Context" element
        }//end while loop
        }
    }//end for loop


/**
 * Parses the 'permsElement' (corresponding to a <code>Permissions</code>
 * element), and assigns those permissions to the specified Group
 * on the specified DSpace Object.
 *
 * @param context DSpace context object
 * @param dso The DSpace Object
 * @param group The DSpace Group
 * @param permsElement The METSRights <code>Permissions</code> element
 */
private void assignPermissions(Context context, DSpaceObject dso, List<ResourcePolicy> policies)
    throws SQLException, AuthorizeException
{
    // Finally, we need to remove any existing policies from the current object,
    // and replace them with the policies provided via METSRights. NOTE:
    // if the list of policies provided by METSRights is an empty list, then
    // the final object will have no policies attached.
    AuthorizeManager.removeAllPolicies(context, dso);
    if (policies == null){
        throw new AuthorizeException("Policies are null");
    }
    else{
        AuthorizeManager.addPolicies(context, policies, dso);
    }
}

private void assignPermissions(Context context, DSpaceObject dso, Group group, Element permsElement)
    throws SQLException, AuthorizeException
{
    //first, parse our permissions to determine which action we are allowing in DSpace
    int actionID = parsePermissions(permsElement);

    //If action ID is less than base READ permissions (value=0),
    // then something must've gone wrong in the parsing
    if(actionID < Constants.READ)
    {
        log.warn("Unable to properly restore all access permissions on object ("
                + "type=" + Constants.typeText[dso.getType()] + ", "
                + "handle=" + dso.getHandle() + ", "
                + "ID=" + dso.getID()
                + ") for group '" + group.getName() + "'.");
    }

    //Otherwise, add the appropriate group policy for this object
    AuthorizeManager.addPolicy(context, dso, actionID, group);
}

/**
 * Parses the 'permsElement' (corresponding to a <code>Permissions</code>
 * element), and assigns those permissions to the specified EPerson
 * on the specified DSpace Object.
 *
 * @param context DSpace context object
 * @param dso The DSpace Object
 * @param person The DSpace EPerson
 * @param permsElement The METSRights <code>Permissions</code> element
 */
private void assignPermissions(Context context, DSpaceObject dso, EPerson person, Element permsElement)
    throws SQLException, AuthorizeException
{
    //first, parse our permissions to determine which action we are allowing in DSpace
    int actionID = parsePermissions(permsElement);

    //If action ID is less than base READ permissions (value=0),
    // then something must've gone wrong in the parsing
    if(actionID < Constants.READ)
    {
        log.warn("Unable to properly restore all access permissions on object ("
                + "type=" + Constants.typeText[dso.getType()] + ", "
                + "handle=" + dso.getHandle() + ", "
                + "ID=" + dso.getID()
                + ") for person '" + person.getEmail() + "'.");
    }

    //Otherwise, add the appropriate EPerson policy for this object
    AuthorizeManager.addPolicy(context, dso, actionID, person);
} // end else
}

/**
@@ -784,4 +684,5 @@ private void assignPermissions(Context context, DSpaceObject dso, List<ResourcePolicy> policies)
    // return -1 to signify failure (as 0 = READ permissions)
    return -1;
}

}
@@ -21,15 +21,7 @@ import java.util.zip.ZipFile;

import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.*;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.ConfigurationManager;
@@ -37,6 +29,8 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.jdom.Element;

/**
@@ -324,18 +318,18 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
}
else
{
    ZipFile zip = new ZipFile(pkgFile);

    try(ZipFile zip = new ZipFile(pkgFile))
    {
        // Retrieve the manifest file entry (named mets.xml)
        ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);

        if(manifestEntry!=null)
        {
            // parse the manifest and sanity-check it.
            manifest = METSManifest.create(zip.getInputStream(manifestEntry),
                    validate, getConfigurationName());

            // close the Zip file for now
            // (we'll extract the other files from zip when we need them)
            zip.close();
        }
    }
}

// return our parsed out METS manifest
@@ -660,8 +654,24 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
addBitstreams(context, item, manifest, pkgFile, params, callback);

// have subclass manage license since it may be extra package file.
addLicense(context, item, license, (Collection) dso
        .getParentObject(), params);
Collection owningCollection = (Collection) dso.getParentObject();
if(owningCollection == null)
{
    //We are probably dealing with an item that isn't archived yet
    InProgressSubmission inProgressSubmission = WorkspaceItem.findByItem(context, item);
    if(inProgressSubmission == null)
    {
        if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow"))
        {
            inProgressSubmission = XmlWorkflowItem.findByItem(context, item);
        }else{
            inProgressSubmission = WorkflowItem.findByItem(context, item);
        }
    }
    owningCollection = inProgressSubmission.getCollection();
}

addLicense(context, item, license, owningCollection, params);

// FIXME ?
// should set lastModifiedTime e.g. when ingesting AIP.
@@ -127,6 +127,8 @@ public class Constants
 */
public static final int ADMIN = 11;

public static final int WITHDRAWN_READ = 12;

/** Position of front page news item -- top box */
public static final int NEWS_TOP = 0;

@@ -139,7 +141,7 @@ public class Constants
public static final String[] actionText = { "READ", "WRITE",
        "OBSOLETE (DELETE)", "ADD", "REMOVE", "WORKFLOW_STEP_1",
        "WORKFLOW_STEP_2", "WORKFLOW_STEP_3", "WORKFLOW_ABORT",
        "DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN" };
        "DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN", "WITHDRAWN_READ" };

/**
 * generating constants for the relevance array dynamically is simple: just
@@ -175,7 +177,9 @@ public class Constants
    0, // 8 - WORKFLOW_ABORT
    RCOLLECTION, // 9 - DEFAULT_BITSTREAM_READ
    RCOLLECTION, // 10 - DEFAULT_ITEM_READ
    RITEM | RCOLLECTION | RCOMMUNITY // 11 - ADMIN
    RITEM | RCOLLECTION | RCOMMUNITY, // 11 - ADMIN
    RBITSTREAM | RBUNDLE | RITEM // 12 - WITHDRAWN_READ

};

public static final String DEFAULT_ENCODING = "UTF-8";
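WITHDRAWN_READ has to stay aligned across three parallel structures: the int constant (12), its entry in actionText, and its entry in the relevance array. A quick hedged self-check:

// Sketch: verify the parallel arrays line up with the new constant.
assert "WITHDRAWN_READ".equals(Constants.actionText[Constants.WITHDRAWN_READ]);
// The relevance entry limits the action to bitstreams, bundles and items,
// matching the objects on which withdraw() switches READ policies.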
@@ -474,8 +474,18 @@ public class Email
System.out.println(" - To: " + to);
System.out.println(" - Subject: " + subject);
System.out.println(" - Server: " + server);
boolean disabled = ConfigurationManager.getBooleanProperty("mail.server.disabled", false);
try
{
    if (disabled)
    {
        System.err.println("\nError sending email:");
        System.err.println(" - Error: cannot test email because mail.server.disabled is set to true");
        System.err.println("\nPlease see the DSpace documentation for assistance.\n");
        System.err.println("\n");
        System.exit(1);
        return;
    }
    e.send();
}
catch (MessagingException me)
@@ -16,6 +16,9 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;

import org.dspace.core.service.NewsService;
import org.dspace.utils.DSpace;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -39,6 +42,10 @@ public class NewsManager
 */
public static String readNewsFile(String newsFile)
{
    NewsService newsService = new DSpace().getSingletonService(NewsService.class);
    if (!newsService.validate(newsFile)) {
        throw new IllegalArgumentException("The file " + newsFile + " is not a valid news file");
    }
    String fileName = getNewsFilePath();

    fileName += newsFile;
@@ -81,6 +88,10 @@ public class NewsManager
 */
public static String writeNewsFile(String newsFile, String news)
{
    NewsService newsService = new DSpace().getSingletonService(NewsService.class);
    if (!newsService.validate(newsFile)) {
        throw new IllegalArgumentException("The file " + newsFile + " is not a valid news file");
    }
    String fileName = getNewsFilePath();

    fileName += newsFile;
@@ -0,0 +1,29 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.core;

import java.util.List;

import org.dspace.core.service.NewsService;

public class NewsServiceImpl implements NewsService {
    private List<String> acceptableFilenames;

    public void setAcceptableFilenames(List<String> acceptableFilenames) {
        this.acceptableFilenames = acceptableFilenames;
    }

    @Override
    public boolean validate(String newsName) {
        if (acceptableFilenames != null) {
            return acceptableFilenames.contains(newsName);
        }
        return false;
    }

}
@@ -0,0 +1,12 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.core.service;

public interface NewsService {
    boolean validate(String newsName);
}
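readNewsFile() and writeNewsFile() now consult this NewsService before touching disk, rejecting any file name outside a configured whitelist (a guard against path manipulation). A hedged usage sketch; the file name is an assumption about a stock configuration:

// Sketch: validate a news file name through the wired service before reading it.
NewsService newsService = new DSpace().getSingletonService(NewsService.class);
if (newsService.validate("news-top.html")) // "news-top.html" assumed whitelisted
{
    String html = NewsManager.readNewsFile("news-top.html");
}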
@@ -164,7 +164,7 @@ public class CurationCli
}
else
{
    c.setIgnoreAuthorization(true);
    c.turnOffAuthorisationSystem();
}

Curator curator = new Curator();
@@ -29,36 +29,50 @@ import org.dspace.core.PluginManager;
/**
 * TaskResolver takes a logical name of a curation task and attempts to deliver
 * a suitable implementation object. Supported implementation types include:
 * (1) Classpath-local Java classes configured and loaded via PluginManager.
 * (2) Local script-based tasks, viz. coded in any scripting language whose
 * <ol>
 * <li> Classpath-local Java classes configured and loaded via PluginManager.</li>
 * <li> Local script-based tasks, viz. coded in any scripting language whose
 * runtimes are accessible via the JSR-223 scripting API. This really amounts
 * to the family of dynamic JVM languages: JRuby, Jython, Groovy, Javascript, etc
 * Note that the requisite jars and other resources for these languages must be
 * installed in the DSpace instance for them to be used here.
 * to the family of dynamic JVM languages: JRuby, Jython, Groovy, Javascript, etc.</li>
 * </ol>
 * <p>
 * Note that the requisite jars and other resources for these languages must
 * be installed in the DSpace instance for them to be used here.
 * Further work may involve remote URL-loadable code, etc.
 *
 * <p>
 * Scripted tasks are managed in a directory configured with the
 * dspace/config/modules/curate.cfg property "script.dir". A catalog of
 * scripted tasks named 'task.catalog' is kept in this directory.
 * {@code dspace/config/modules/curate.cfg} property "script.dir".
 * A catalog of
 * scripted tasks named "task.catalog" is kept in this directory.
 * Each task has a 'descriptor' property with value syntax:
 * <engine>|<relFilePath>|<implClassCtor>
 * <br/>
 * {@code <engine>|<relFilePath>|<implClassCtor>}
 *
 * <p>
 * An example property:
 * <br/>
 * {@code linkchecker = ruby|rubytask.rb|LinkChecker.new}
 *
 * linkchecker = ruby|rubytask.rb|LinkChecker.new
 *
 * <p>
 * This descriptor means that a 'ruby' script engine will be created,
 * a script file named 'rubytask.rb' in the directory <script.dir> will be
 * loaded and the resolver will expect an evaluation of 'LinkChecker.new' will
 * provide a correct implementation object.
 * a script file named 'rubytask.rb' in the directory {@code <script.dir>}
 * will be
 * loaded and the resolver will expect an evaluation of 'LinkChecker.new'
 * will provide a correct implementation object.
 *
 * <p>
 * Script files may embed their descriptors to facilitate deployment.
 * To accomplish this, a script must include the descriptor string with syntax:
 * $td=<descriptor> somewhere on a comment line. For example:
 * To accomplish this, a script must include the descriptor string with
 * syntax {@code $td=<descriptor>} somewhere on a comment line. For example:
 *
 * # My descriptor $td=ruby|rubytask.rb|LinkChecker.new
 * <p>
 * {@code # My descriptor $td=ruby|rubytask.rb|LinkChecker.new}
 *
 * For portability, the <relFilePath> component may be omitted in this context.
 * Thus, $td=ruby||LinkChecker.new will be expanded to a descriptor
 * <p>
 * For portability, the {@code <relFilePath>} component may be omitted in
 * this context.
 * Thus, {@code $td=ruby||LinkChecker.new} will be expanded to a descriptor
 * with the name of the embedding file.
 *
 * @author richardrodgers
@@ -218,10 +218,20 @@ public class DiscoverQuery {
this.facetOffset = facetOffset;
}

/**
* Sets the fields which you want Discovery to return in the search results.
* It is HIGHLY recommended to limit the fields returned, as by default
* some backends (like Solr) will return everything.
* @param field field to add to the list of fields returned
*/
public void addSearchField(String field){
this.searchFields.add(field);
}

/**
* Get list of fields which Discovery will return in the search results
* @return List of field names
*/
public List<String> getSearchFields() {
return searchFields;
}
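
Note: a short usage sketch of the addSearchField()/getSearchFields() API introduced above; the query and field values are illustrative only:

    // Limit the fields Solr returns for a Discovery query instead of returning everything
    DiscoverQuery discoveryQuery = new DiscoverQuery();
    discoveryQuery.setQuery("dark matter");               // example user query
    discoveryQuery.addSearchField("handle");              // request only the fields we need
    discoveryQuery.addSearchField("search.resourcetype");
    discoveryQuery.addSearchField("search.resourceid");
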
@@ -39,7 +39,7 @@ public class IndexClient {
public static void main(String[] args) throws SQLException, IOException, SearchServiceException {

Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

String usage = "org.dspace.discovery.IndexClient [-cbhf[r <item handle>]] or nothing to update/clean an existing index.";
Options options = new Options();

@@ -113,4 +113,11 @@ public interface SearchService {
* @return the indexed field
*/
String toSortFieldIndex(String metadataField, String type);

/**
* Utility method to escape any special characters in a user's query
* @param query
* @return query with any special characters escaped
*/
String escapeQueryChars(String query);
}
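
Note: a sketch of how the new escapeQueryChars() contract is meant to be used, escaping only the user-entered portion of a query and never a constructed field-based query; the searchService instance is assumed to have been obtained elsewhere:

    // Escape raw user input before passing it to Discovery
    String userInput = "title:(1+1)";                     // example text typed by a user
    DiscoverQuery q = new DiscoverQuery();
    q.setQuery(searchService.escapeQueryChars(userInput));
    // Do NOT escape something like HANDLE_FIELD + ":" + handle, or the ':' separating
    // the field from its value would itself be escaped and the query would break.
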
@@ -119,6 +119,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
private static final Logger log = Logger.getLogger(SolrServiceImpl.class);

protected static final String LAST_INDEXED_FIELD = "SolrIndexer.lastIndexed";
protected static final String HANDLE_FIELD = "handle";
protected static final String RESOURCE_TYPE_FIELD = "search.resourcetype";
protected static final String RESOURCE_ID_FIELD = "search.resourceid";

public static final String FILTER_SEPARATOR = "\n|||\n";

@@ -149,9 +152,11 @@ public class SolrServiceImpl implements SearchService, IndexingService {

solr.setBaseURL(solrService);
solr.setUseMultiPartPost(true);
// Dummy/test query to search for Item (type=2) of ID=1
SolrQuery solrQuery = new SolrQuery()
.setQuery("search.resourcetype:2 AND search.resourceid:1");

.setQuery(RESOURCE_TYPE_FIELD + ":2 AND " + RESOURCE_ID_FIELD + ":1");
// Only return obj identifier fields in result doc
solrQuery.setFields(RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
solr.query(solrQuery);

// As long as Solr initialized, check with DatabaseUtils to see
@@ -323,7 +328,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {

try {
if(getSolr() != null){
getSolr().deleteByQuery("handle:\"" + handle + "\"");
getSolr().deleteByQuery(HANDLE_FIELD + ":\"" + handle + "\"");
if(commit)
{
getSolr().commit();
@@ -462,10 +467,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
if (force)
{
getSolr().deleteByQuery("search.resourcetype:[2 TO 4]");
getSolr().deleteByQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
} else {
SolrQuery query = new SolrQuery();
query.setQuery("search.resourcetype:[2 TO 4]");
// Query for all indexed Items, Collections and Communities,
// returning just their handle
query.setFields(HANDLE_FIELD);
query.setQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
QueryResponse rsp = getSolr().query(query);
SolrDocumentList docs = rsp.getResults();

@@ -475,7 +483,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {

SolrDocument doc = (SolrDocument) iter.next();

String handle = (String) doc.getFieldValue("handle");
String handle = (String) doc.getFieldValue(HANDLE_FIELD);

DSpaceObject o = HandleManager.resolveToObject(context, handle);

@@ -616,7 +624,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
boolean inIndex = false;

SolrQuery query = new SolrQuery();
query.setQuery("handle:" + handle);
query.setQuery(HANDLE_FIELD + ":" + handle);
// Specify that we ONLY want the LAST_INDEXED_FIELD returned in the field list (fl)
query.setFields(LAST_INDEXED_FIELD);
QueryResponse rsp;

try {
@@ -1444,9 +1454,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// New fields to weaken the dependence on handles, and allow for faster
// list display
doc.addField("search.uniqueid", type+"-"+id);
doc.addField("search.resourcetype", Integer.toString(type));
doc.addField(RESOURCE_TYPE_FIELD, Integer.toString(type));

doc.addField("search.resourceid", Integer.toString(id));
doc.addField(RESOURCE_ID_FIELD, Integer.toString(id));

// want to be able to search for handle, so use keyword
// (not tokenized, but it is indexed)
@@ -1454,7 +1464,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
{
// want to be able to search for handle, so use keyword
// (not tokenized, but it is indexed)
doc.addField("handle", handle);
doc.addField(HANDLE_FIELD, handle);
}

if (locations != null)
@@ -1584,7 +1594,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
discoveryQuery.addFilterQueries("location:l" + dso.getID());
} else if (dso instanceof Item)
{
discoveryQuery.addFilterQueries("handle:" + dso.getHandle());
discoveryQuery.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
}
}
return search(context, discoveryQuery, includeUnDiscoverable);
@@ -1620,6 +1630,18 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}

solrQuery.setQuery(query);

// Add any search fields to our query. This is the limited list
// of fields that will be returned in the solr result
for(String fieldName : discoveryQuery.getSearchFields())
{
solrQuery.addField(fieldName);
}
// Also ensure a few key obj identifier fields are returned with every query
solrQuery.addField(HANDLE_FIELD);
solrQuery.addField(RESOURCE_TYPE_FIELD);
solrQuery.addField(RESOURCE_ID_FIELD);

if(discoveryQuery.isSpellCheck())
{
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -1640,7 +1662,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
if(discoveryQuery.getDSpaceObjectFilter() != -1)
{
solrQuery.addFilterQuery("search.resourcetype:" + discoveryQuery.getDSpaceObjectFilter());
solrQuery.addFilterQuery(RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter());
}

for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++)
@@ -1753,7 +1775,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
query.addFilterQueries("location:l" + dso.getID());
} else if (dso instanceof Item)
{
query.addFilterQueries("handle:" + dso.getHandle());
query.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
}
}
return searchJSON(context, query, jsonIdentifier);
@@ -1807,7 +1829,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
{
result.addDSpaceObject(dso);
} else {
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue("handle")));
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue(HANDLE_FIELD)));
continue;
}

@@ -1926,9 +1948,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {

protected static DSpaceObject findDSpaceObject(Context context, SolrDocument doc) throws SQLException {

Integer type = (Integer) doc.getFirstValue("search.resourcetype");
Integer id = (Integer) doc.getFirstValue("search.resourceid");
String handle = (String) doc.getFirstValue("handle");
Integer type = (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD);
Integer id = (Integer) doc.getFirstValue(RESOURCE_ID_FIELD);
String handle = (String) doc.getFirstValue(HANDLE_FIELD);

if (type != null && id != null)
{
@@ -1981,7 +2003,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {

SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery(query);
solrQuery.setFields("search.resourceid", "search.resourcetype");
//Only return obj identifier fields in result doc
solrQuery.setFields(RESOURCE_ID_FIELD, RESOURCE_TYPE_FIELD);
solrQuery.setStart(offset);
solrQuery.setRows(max);
if (orderfield != null)
@@ -2001,7 +2024,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
{
SolrDocument doc = (SolrDocument) iter.next();

DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue("search.resourcetype"), (Integer) doc.getFirstValue("search.resourceid"));
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD), (Integer) doc.getFirstValue(RESOURCE_ID_FIELD));

if (o != null)
{
@@ -2089,7 +2112,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
try{
SolrQuery solrQuery = new SolrQuery();
//Set the query to handle since this is unique
solrQuery.setQuery("handle: " + item.getHandle());
solrQuery.setQuery(HANDLE_FIELD + ": " + item.getHandle());
//Only return obj identifier fields in result doc
solrQuery.setFields(HANDLE_FIELD, RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
//Add the more like this parameters !
solrQuery.setParam(MoreLikeThisParams.MLT, true);
//Add a comma separated list of the similar fields
@@ -2320,4 +2345,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
throw new SearchServiceException(e.getMessage(), e);
}
}

@Override
public String escapeQueryChars(String query) {
// Use Solr's built in query escape tool
// WARNING: You should only escape characters from user entered queries,
// otherwise you may accidentally BREAK field-based queries (which often
// rely on special characters to separate the field from the query value)
return ClientUtils.escapeQueryChars(query);
}
}

@@ -91,10 +91,9 @@ public class EmbargoManager
}
}
String slift = myLift.toString();
boolean ignoreAuth = context.ignoreAuthorization();
try
{
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
item.clearMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);
item.addMetadata(lift_schema, lift_element, lift_qualifier, null, slift);
log.info("Set embargo on Item "+item.getHandle()+", expires on: "+slift);
@@ -105,7 +104,7 @@ public class EmbargoManager
}
finally
{
context.setIgnoreAuthorization(ignoreAuth);
context.restoreAuthSystemState();
}
}

@@ -267,7 +266,7 @@ public class EmbargoManager
try
{
context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
Date now = new Date();

// scan items under embargo
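
Note: the EmbargoManager hunks above migrate from the deprecated setIgnoreAuthorization() calls to the paired authorisation-state methods; a minimal sketch of the resulting idiom:

    // Always restore the previous authorisation state in a finally block
    context.turnOffAuthorisationSystem();   // replaces context.setIgnoreAuthorization(true)
    try
    {
        // ... privileged work, e.g. clearing and re-adding embargo metadata ...
    }
    finally
    {
        context.restoreAuthSystemState();   // replaces context.setIgnoreAuthorization(ignoreAuth)
    }
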
@@ -499,7 +499,7 @@ public class EPerson extends DSpaceObject
break;

case LANGUAGE:
s = "m_text_value";
s = "m.text_value";
t = "language";
break;
case NETID:
@@ -507,23 +507,26 @@ public class EPerson extends DSpaceObject
break;

default:
s = "m_text_value";
s = "m.text_value";
t = "lastname";
}

// NOTE: The use of 's' in the order by clause can not cause an SQL
// injection because the string is derived from constant values above.
TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY ?",s);
TableRowIterator rows;
if(!t.equals("")) {
rows = DatabaseManager.query(context,
"SELECT * FROM eperson e " +
"LEFT JOIN metadatavalue m on (m.resource_id = e.eperson_id and m.resource_type_id = ? and m.metadata_field_id = ?) " +
"ORDER BY ?",
"ORDER BY " + s,
Constants.EPERSON,
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID(),
s
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID()
);
}
else {
rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY " + s);
}
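
Note: the EPerson change above exists because a JDBC bind parameter cannot supply an ORDER BY column; "ORDER BY ?" binds a constant string, so every row compares equal and no real sorting happens. Concatenation is safe here only because s is chosen from the hard-coded constants above. A sketch, with a hypothetical Connection conn:

    // Broken: the parameter is bound as a literal value, not as a column name
    PreparedStatement bad = conn.prepareStatement("SELECT * FROM eperson e ORDER BY ?");
    bad.setString(1, "m.text_value");   // sorts by the constant 'm.text_value', i.e. not at all

    // Safe variant used in the diff: s comes only from constants such as "m.text_value"
    String s = "m.text_value";
    PreparedStatement ok = conn.prepareStatement("SELECT * FROM eperson e ORDER BY " + s);
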
@@ -358,9 +358,9 @@ public class Group extends DSpaceObject
}

/**
* fast check to see if an eperson is a member called with eperson id, does
* database lookup without instantiating all of the epeople objects and is
* thus a static method
* fast check to see if the current EPerson is a member of a Group. Does
* database lookup without instantiating all of the EPerson objects and is
* thus a static method.
*
* @param c
* context
@@ -380,6 +380,29 @@ public class Group extends DSpaceObject
return epersonInGroup(c, groupid, currentuser);
}

/**
* Fast check to see if a given EPerson is a member of a Group.
* Does database lookup without instantiating all of the EPerson objects and
* is thus a static method.
*
* @param c current DSpace context.
* @param eperson candidate to test for membership.
* @param groupid group whose membership is to be tested.
* @return true if {@link eperson} is a member of Group {@link groupid}.
* @throws SQLException passed through
*/
public static boolean isMember(Context c, EPerson eperson, int groupid)
throws SQLException
{
// Every EPerson is a member of Anonymous
if (groupid == 0)
{
return true;
}

return epersonInGroup(c, groupid, eperson);
}

/**
* Get all of the groups that an eperson is a member of.
*
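
Note: a short usage sketch of the static isMember(Context, EPerson, int) overload added above; the email address and group id are illustrative (group 1 is the Administrator group in a default install):

    // Membership test for an arbitrary EPerson, not just the current user
    EPerson candidate = EPerson.findByEmail(context, "user@example.com");
    if (Group.isMember(context, candidate, 1))
    {
        // candidate is an administrator; group 0 (Anonymous) would always return true
    }
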
@@ -284,6 +284,8 @@ public class EventManager
{
Context ctx = new Context();

try {

for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
.iterator(); ci.hasNext();)
{
@@ -293,6 +295,13 @@ public class EventManager
cp.getConsumer().finish(ctx);
}
}

ctx.complete();

} catch (Exception e) {
ctx.abort();
throw e;
}
return;

}
@@ -9,89 +9,167 @@ package org.dspace.handle;

import java.io.BufferedReader;
import java.io.InputStreamReader;

import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.search.DSIndexer;
import org.dspace.browse.IndexBrowse;
import org.dspace.discovery.IndexClient;

/**
* A script to update the handle values in the database. This is typically used
* when moving from a test machine (handle = 123456789) to a production service.
* when moving from a test machine (handle = 123456789) to a production service
* or when make a test clone from production service.
*
* @author Stuart Lewis
* @author Ivo Prajer (Czech Technical University in Prague)
*/
public class UpdateHandlePrefix
{

private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);

/**
* When invoked as a command-line tool, updates handle prefix
*
* @param args the command-line arguments, none used
* @throws java.lang.Exception
*
*/
public static void main(String[] args) throws Exception
{
// There should be two paramters
// There should be two parameters
if (args.length < 2)
{
System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n");
System.exit(1);
}
else
{
// Confirm with the user that this is what they want to do
String oldH = args[0];
String newH = args[1];

BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
// Get info about changes
System.out.println("\nGetting information about handles from database...");
Context context = new Context();
System.out.println("If you continue, all handles in your repository with prefix " +
oldH + " will be updated to have handle prefix " + newH + "\n");
String sql = "SELECT count(*) as count FROM handle " +
String sql = "SELECT count(*) as count " +
"FROM handle " +
"WHERE handle LIKE '" + oldH + "%'";
TableRow row = DatabaseManager.querySingle(context, sql, new Object[] {});
long count = row.getLongColumn("count");
System.out.println(count + " items will be updated.\n");
System.out.print("Have you taken a backup, and are you ready to continue? [y/n]: ");

if (count > 0)
{
// Print info text about changes
System.out.println(
"In your repository will be updated " + count + " handle" +
((count > 1) ? "s" : "") + " to new prefix " + newH +
" from original " + oldH + "!\n"
);

// Confirm with the user that this is what they want to do
System.out.print(
"Servlet container (e.g. Apache Tomcat, Jetty, Caucho Resin) must be running.\n" +
"If it is necessary, please make a backup of the database.\n" +
"Are you ready to continue? [y/n]: "
);
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
String choiceString = input.readLine();

if (choiceString.equalsIgnoreCase("y"))
{
try {
log.info("Updating handle prefix from " + oldH + " to " + newH);

// Make the changes
System.out.print("Updating handle table... ");
sql = "update handle set handle = '" + newH + "' || '/' || handle_id " +
"where handle like '" + oldH + "/%'";
int updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(updated + " items updated");
System.out.print("\nUpdating handle table... ");
sql = "UPDATE handle " +
"SET handle = '" + newH + "' || '/' || handle_id " +
"WHERE handle like '" + oldH + "/%'";
int updHdl = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(
updHdl + " item" + ((updHdl > 1) ? "s" : "") + " updated"
);

System.out.print("Updating metadatavalues table... ");
sql = "UPDATE metadatavalue SET text_value= (SELECT 'http://hdl.handle.net/' || " +
"handle FROM handle WHERE handle.resource_id=item_id AND " +
"handle.resource_type_id=2) WHERE text_value LIKE 'http://hdl.handle.net/%';";
updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(updated + " metadata values updated");
sql = "UPDATE metadatavalue " +
"SET text_value = " +
"(" +
"SELECT 'http://hdl.handle.net/' || handle " +
"FROM handle " +
"WHERE handle.resource_id = metadatavalue.resource_id " +
"AND handle.resource_type_id = 2" +
") " +
"WHERE text_value LIKE 'http://hdl.handle.net/" + oldH + "/%'" +
"AND EXISTS " +
"(" +
"SELECT 1 " +
"FROM handle " +
"WHERE handle.resource_id = metadatavalue.resource_id " +
"AND handle.resource_type_id = 2" +
")";
int updMeta = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(
updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
);

// Commit the changes
context.complete();

System.out.print("Re-creating browse and search indexes... ");
log.info(
"Done with updating handle prefix. " +
"It was changed " + updHdl + " handle" + ((updHdl > 1) ? "s" : "") +
" and " + updMeta + " metadata record" + ((updMeta > 1) ? "s" : "")
);

// Reinitialise the browse system
IndexBrowse.main(new String[] {"-i"});
}
catch (SQLException sqle)
{
if ((context != null) && (context.isValid()))
{
context.abort();
context = null;
}
System.out.println("\nError during SQL operations.");
throw sqle;
}

System.out.println("Handles successfully updated in database.\n");
System.out.println("Re-creating browse and search indexes...");

// Reinitialise the browse system
try
{
DSIndexer.main(new String[0]);
// Reinitialise the search and browse system
IndexClient.main(new String[] {"-b"});
System.out.println("Browse and search indexes are ready now.");
// All done
System.out.println("\nAll done successfully. Please check the DSpace logs!\n");
}
catch (Exception e)
{
// Not a lot we can do
System.out.println("Error re-indexing:");
e.printStackTrace();
System.out.println("\nPlease manually run [dspace]/bin/index-all");
System.out.println("Error during re-indexing.");
System.out.println(
"\n\nAutomatic re-indexing failed. Please perform it manually.\n" +
"You should run one of the following commands:\n\n" +
" [dspace]/bin/dspace index-discovery -b\n\n" +
"If you are using Solr for browse (this is the default setting).\n" +
"When launching this command, your servlet container must be running.\n\n" +
" [dspace]/bin/dspace index-lucene-init\n\n" +
"If you enabled Lucene for search.\n" +
"When launching this command, your servlet container must be shutdown.\n"
);
throw e;
}

// All done
System.out.println("\nHandles successfully updated.");
}
else
{
System.out.println("No changes have been made to your data.");
System.out.println("No changes have been made to your data.\n");
}
}
else
{
System.out.println("Nothing to do! All handles are up-to-date.\n");
}
}
}
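
Note: as a worked example of the SQL above (prefix values illustrative), running the tool with old prefix 123456789 and new prefix 10673 rewrites each matching handle row to 10673/<handle_id>; for handles minted by DSpace the numeric suffix equals the row's handle_id, so 123456789/42 becomes 10673/42. The metadatavalue UPDATE then rewrites the matching URI, e.g. http://hdl.handle.net/123456789/42 becomes http://hdl.handle.net/10673/42.
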
@@ -10,7 +10,6 @@ package org.dspace.identifier;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
@@ -68,12 +67,12 @@ public class DOIIdentifierProvider
public static final String DOI_QUALIFIER = "uri";

public static final Integer TO_BE_REGISTERED = 1;
public static final Integer TO_BE_RESERVERED = 2;
public static final Integer TO_BE_RESERVED = 2;
public static final Integer IS_REGISTERED = 3;
public static final Integer IS_RESERVED = 4;
public static final Integer UPDATE_RESERVERED = 5;
public static final Integer UPDATE_RESERVED = 5;
public static final Integer UPDATE_REGISTERED = 6;
public static final Integer UPDATE_BEFORE_REGISTERATION = 7;
public static final Integer UPDATE_BEFORE_REGISTRATION = 7;
public static final Integer TO_BE_DELETED = 8;
public static final Integer DELETED = 9;

@@ -251,7 +250,7 @@ public class DOIIdentifierProvider
return;
}

doiRow.setColumn("status", TO_BE_RESERVERED);
doiRow.setColumn("status", TO_BE_RESERVED);
try
{
DatabaseManager.update(context, doiRow);
@@ -353,11 +352,11 @@ public class DOIIdentifierProvider
}
else if (TO_BE_REGISTERED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", UPDATE_BEFORE_REGISTERATION);
doiRow.setColumn("status", UPDATE_BEFORE_REGISTRATION);
}
else if (IS_RESERVED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", UPDATE_RESERVERED);
doiRow.setColumn("status", UPDATE_RESERVED);
}
else
{
@@ -416,11 +415,11 @@ public class DOIIdentifierProvider
{
doiRow.setColumn("status", IS_REGISTERED);
}
else if (UPDATE_BEFORE_REGISTERATION == doiRow.getIntColumn("status"))
else if (UPDATE_BEFORE_REGISTRATION == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", TO_BE_REGISTERED);
}
else if (UPDATE_RESERVERED == doiRow.getIntColumn("status"))
else if (UPDATE_RESERVED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", IS_RESERVED);
}

@@ -561,7 +561,7 @@ public class EZIDIdentifierProvider
/**
* Map selected DSpace metadata to fields recognized by DataCite.
*/
private Map<String, String> crosswalkMetadata(DSpaceObject dso)
Map<String, String> crosswalkMetadata(DSpaceObject dso)
{
if ((null == dso) || !(dso instanceof Item))
{
@@ -632,18 +632,42 @@ public class EZIDIdentifierProvider
mapped.put(DATACITE_PUBLICATION_YEAR, year);
}

// TODO find a way to get a current direct URL to the object and set _target
// mapped.put("_target", url);
// Supply _target link back to this object
String handle = dso.getHandle();
if (null == handle)
{
log.warn("{} #{} has no handle -- location not set.",
dso.getTypeText(), dso.getID());
}
else
{
String url = configurationService.getProperty("dspace.url")
+ "/handle/" + item.getHandle();
log.info("Supplying location: {}", url);
mapped.put("_target", url);
}

return mapped;
}

/**
* Provide a map from DSO metadata keys to EZID keys. This will drive the
* generation of EZID metadata for the minting of new identifiers.
*
* @param aCrosswalk
*/
@Required
public void setCrosswalk(Map<String, String> aCrosswalk)
{
crosswalk = aCrosswalk;
}

/**
* Provide a map from DSO metadata keys to classes which can transform their
* values to something acceptable to EZID.
*
* @param transformMap
*/
public void setCrosswalkTransform(Map<String, Transform> transformMap)
{
transforms = transformMap;

@@ -179,19 +179,19 @@ public class DOIOrganiser {

if (line.hasOption('l'))
{
organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVERED);
organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVED);
organiser.list("registration", null, null, DOIIdentifierProvider.TO_BE_REGISTERED);
organiser.list("update", null, null,
DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
DOIIdentifierProvider.UPDATE_REGISTERED,
DOIIdentifierProvider.UPDATE_RESERVERED);
DOIIdentifierProvider.UPDATE_RESERVED);
organiser.list("deletion", null, null, DOIIdentifierProvider.TO_BE_DELETED);
}

if (line.hasOption('s'))
{
TableRowIterator it = organiser
.getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVERED);
.getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVED);

try {
if (!it.hasNext())
@@ -244,8 +244,8 @@ public class DOIOrganiser {
if (line.hasOption('u'))
{
TableRowIterator it = organiser.getDOIsByStatus(
DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
DOIIdentifierProvider.UPDATE_RESERVERED,
DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
DOIIdentifierProvider.UPDATE_RESERVED,
DOIIdentifierProvider.UPDATE_REGISTERED);

try {

@@ -259,7 +259,7 @@ public class CCLookup {
throws IOException{

// Determine the issue URL
String issueUrl = this.cc_root + "/license/" + licenseId + "/issue";
String issueUrl = cc_root + "/license/" + licenseId + "/issue";
// Assemble the "answers" document
String answer_doc = "<answers>\n<locale>" + lang + "</locale>\n" + "<license-" + licenseId + ">\n";
Iterator keys = answers.keySet().iterator();
@@ -411,31 +411,18 @@ public class CCLookup {

public String getRdf()
throws IOException {
String myString = null;
java.io.ByteArrayOutputStream outputstream = new java.io.ByteArrayOutputStream();
String result = "";
try {
outputstream.write("<result>\n".getBytes());
JDOMXPath xpathRdf = new JDOMXPath("//result/rdf");
JDOMXPath xpathLicenseRdf = new JDOMXPath("//result/licenserdf");
XMLOutputter xmloutputter = new XMLOutputter();
Element rdfParent = ((Element)xpathRdf.selectSingleNode(this.license_doc));
xmloutputter.output(rdfParent, outputstream);
Element licenseRdfParent = ((Element)xpathLicenseRdf.selectSingleNode(this.license_doc));
outputstream.write("\n".getBytes());
xmloutputter.output(licenseRdfParent, outputstream);
outputstream.write("\n</result>\n".getBytes());
result = CreativeCommons.fetchLicenseRDF(license_doc);
} catch (Exception e) {
log.warn("An error occurred getting the rdf . . ." + e.getMessage() );
setSuccess(false);
} finally {
outputstream.close();
return outputstream.toString();
}
return result;
}

public boolean isSuccess() {
setSuccess(false);
java.io.ByteArrayOutputStream outputstream = new java.io.ByteArrayOutputStream();
JDOMXPath xp_Success = null;
String text = null;
try {

@@ -7,13 +7,16 @@
*/
package org.dspace.license;

import java.io.*;
import java.net.URL;
import java.net.URLConnection;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.sql.SQLException;
import java.util.ArrayList;

import javax.xml.transform.Templates;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
@@ -26,11 +29,14 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Metadatum;
import org.dspace.content.Item;
import org.dspace.content.Metadatum;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.jdom.Document;
import org.jdom.transform.JDOMResult;
import org.jdom.transform.JDOMSource;

public class CreativeCommons
{
@@ -46,9 +52,17 @@ public class CreativeCommons

/**
* Some BitStream Names (BSN)
*
* @deprecated use the metadata retrieved at {@link CreativeCommons#getCCField(String)} (see https://jira.duraspace.org/browse/DS-2604)
*/
@Deprecated
private static final String BSN_LICENSE_URL = "license_url";

/**
*
* @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
*/
@Deprecated
private static final String BSN_LICENSE_TEXT = "license_text";

private static final String BSN_LICENSE_RDF = "license_rdf";
@@ -121,41 +135,6 @@ public class CreativeCommons
setBitstreamFromBytes(item, bundle, BSN_LICENSE_RDF, bs_rdf_format, licenseRdf.getBytes());
}

/**
* This is a bit of the "do-the-right-thing" method for CC stuff in an item
*/
public static void setLicense(Context context, Item item,
String cc_license_url) throws SQLException, IOException,
AuthorizeException
{
Bundle bundle = getCcBundle(item);

// get some more information
String license_text = fetchLicenseText(cc_license_url);
String license_rdf = fetchLicenseRDF(cc_license_url);

// set the formats
BitstreamFormat bs_url_format = BitstreamFormat.findByShortDescription(
context, "License");
BitstreamFormat bs_text_format = BitstreamFormat.findByShortDescription(
context, "CC License");
BitstreamFormat bs_rdf_format = BitstreamFormat.findByShortDescription(
context, "RDF XML");

// set the URL bitstream
setBitstreamFromBytes(item, bundle, BSN_LICENSE_URL, bs_url_format,
cc_license_url.getBytes());

// set the license text bitstream
setBitstreamFromBytes(item, bundle, BSN_LICENSE_TEXT, bs_text_format,
license_text.getBytes());

// set the RDF bitstream
setBitstreamFromBytes(item, bundle, BSN_LICENSE_RDF, bs_rdf_format,
license_rdf.getBytes());
}

/**
* Used by DSpaceMetsIngester
*
@@ -224,8 +203,7 @@ public class CreativeCommons
// verify it has correct contents
try
{
if ((getLicenseURL(item) == null) || (getLicenseText(item) == null)
|| (getLicenseRDF(item) == null))
if ((getLicenseURL(item) == null))
{
return false;
}
@@ -238,18 +216,6 @@ public class CreativeCommons
return true;
}

public static String getLicenseURL(Item item) throws SQLException,
IOException, AuthorizeException
{
return getStringFromBitstream(item, BSN_LICENSE_URL);
}

public static String getLicenseText(Item item) throws SQLException,
IOException, AuthorizeException
{
return getStringFromBitstream(item, BSN_LICENSE_TEXT);
}

public static String getLicenseRDF(Item item) throws SQLException,
IOException, AuthorizeException
{
@@ -269,56 +235,55 @@ public class CreativeCommons
/**
* Get Creative Commons license Text, returning Bitstream object.
* @return bitstream or null.
*
* @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
*/
@Deprecated
public static Bitstream getLicenseTextBitstream(Item item) throws SQLException,
IOException, AuthorizeException
{
return getBitstream(item, BSN_LICENSE_TEXT);
}

public static String fetchLicenseRdf(String ccResult) {
StringWriter result = new StringWriter();
String licenseRdfString = new String("");
try {
InputStream inputstream = new ByteArrayInputStream(ccResult.getBytes("UTF-8"));
templates.newTransformer().transform(new StreamSource(inputstream), new StreamResult(result));
} catch (TransformerException te) {
throw new RuntimeException("Transformer exception " + te.getMessage(), te);
} catch (IOException ioe) {
throw new RuntimeException("IOexception " + ioe.getCause().toString(), ioe);
} finally {
return result.getBuffer().toString();
}
/**
* Retrieve the license text
*
* @param item - the item
* @return the license in textual format
* @throws SQLException
* @throws IOException
* @throws AuthorizeException
*
* @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
*/
public static String getLicenseText(Item item) throws SQLException, IOException, AuthorizeException {
return getStringFromBitstream(item, BSN_LICENSE_TEXT);
}

public static String getLicenseURL(Item item) throws SQLException, IOException, AuthorizeException {
String licenseUri = CreativeCommons.getCCField("uri").ccItemValue(item);
if (StringUtils.isNotBlank(licenseUri)) {
return licenseUri;
}
// JSPUI backward compatibility see https://jira.duraspace.org/browse/DS-2604
return getStringFromBitstream(item, BSN_LICENSE_URL);
}

/**
* Apply same transformation on the document to retrieve only the most relevant part of the document passed as parameter.
* If no transformation is needed then take in consideration to empty the CreativeCommons.xml
*
* The next two methods are old CC.
* Remains until prev. usages are eliminated.
* @Deprecated
*
* @param license - an element that could be contains as part of your content the license rdf
* @return the document license in textual format after the transformation
*/
/**
* Get a few license-specific properties. We expect these to be cached at
* least per server run.
*/
public static String fetchLicenseText(String license_url)
{
String text_url = license_url;
byte[] urlBytes = fetchURL(text_url);

return (urlBytes != null) ? new String(urlBytes) : "";
}

public static String fetchLicenseRDF(String license_url)
public static String fetchLicenseRDF(Document license)
{
StringWriter result = new StringWriter();

try
{
templates.newTransformer().transform(
new StreamSource(license_url + "rdf"),
new JDOMSource(license),
new StreamResult(result)
);
}
@@ -421,33 +386,6 @@ public class CreativeCommons
return baos.toByteArray();
}

/**
* Fetch the contents of a URL
*/
private static byte[] fetchURL(String url_string)
{
try
{
String line = "";
URL url = new URL(url_string);
URLConnection connection = url.openConnection();
InputStream inputStream = connection.getInputStream();
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
StringBuilder sb = new StringBuilder();

while ((line = reader.readLine()) != null)
{
sb.append(line);
}

return sb.toString().getBytes();
}
catch (Exception exc)
{
log.error(exc.getMessage());
return null;
}
}
/**
* Returns a metadata field handle for given field Id
*/
@@ -564,4 +502,34 @@ public class CreativeCommons
item.addMetadata(params[0], params[1], params[2], params[3], value);
}
}

/**
* Remove license information, delete also the bitstream
*
* @param context - DSpace Context
* @param uriField - the metadata field for license uri
* @param nameField - the metadata field for license name
* @param item - the item
* @throws AuthorizeException
* @throws IOException
* @throws SQLException
*/
public static void removeLicense(Context context, MdField uriField,
MdField nameField, Item item) throws AuthorizeException, IOException, SQLException {
// only remove any previous licenses
String licenseUri = uriField.ccItemValue(item);
if (licenseUri != null) {
uriField.removeItemValue(item, licenseUri);
if (ConfigurationManager.getBooleanProperty("cc.submit.setname"))
{
String licenseName = nameField.keyedItemValue(item, licenseUri);
nameField.removeItemValue(item, licenseName);
}
if (ConfigurationManager.getBooleanProperty("cc.submit.addbitstream"))
{
removeLicense(context, item);
}
}
}

}

@@ -72,7 +72,7 @@ public class LicenseCleanup
{

Context ctx = new Context();
ctx.setIgnoreAuthorization(true);
ctx.turnOffAuthorisationSystem();
ItemIterator iter = Item.findAll(ctx);

Properties props = new Properties();

@@ -20,6 +20,7 @@ import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.event.Consumer;
@@ -52,6 +53,8 @@ public class RDFConsumer implements Consumer
}

int sType = event.getSubjectType();
log.debug(event.getEventTypeAsString() + " for "
+ event.getSubjectTypeAsString() + ":" + event.getSubjectID());
switch (sType)
{
case (Constants.BITSTREAM) :
@@ -100,7 +103,7 @@ public class RDFConsumer implements Consumer
Bitstream bitstream = Bitstream.find(ctx, event.getSubjectID());
if (bitstream == null)
{
log.warn("Cannot find bitstream " + event.getSubjectID() + "! "
log.debug("Cannot find bitstream " + event.getSubjectID() + "! "
+ "Ignoring, as it is likely it was deleted "
+ "and we'll cover it by a REMOVE event on its bundle.");
return;
@@ -111,6 +114,11 @@ public class RDFConsumer implements Consumer
Item[] items = b.getItems();
for (Item i : items)
{
if (WorkspaceItem.findByItem(ctx, i) != null)
{
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
continue;
}
DSOIdentifier id = new DSOIdentifier(i, ctx);
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
{
@@ -148,7 +156,7 @@ public class RDFConsumer implements Consumer
Bundle bundle = Bundle.find(ctx, event.getSubjectID());
if (bundle == null)
{
log.warn("Cannot find bundle " + event.getSubjectID() + "! "
log.debug("Cannot find bundle " + event.getSubjectID() + "! "
+ "Ignoring, as it is likely it was deleted "
+ "and we'll cover it by a REMOVE event on its item.");
return;
@@ -156,6 +164,11 @@ public class RDFConsumer implements Consumer
Item[] items = bundle.getItems();
for (Item i : items)
{
if (WorkspaceItem.findByItem(ctx, i) != null)
{
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
continue;
}
DSOIdentifier id = new DSOIdentifier(i, ctx);
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
{
@@ -216,14 +229,24 @@ public class RDFConsumer implements Consumer
DSpaceObject dso = event.getSubject(ctx);
if (dso == null)
{
log.warn("Cannot find " + event.getSubjectTypeAsString() + " "
log.debug("Cannot find " + event.getSubjectTypeAsString() + " "
+ event.getSubjectID() + "! " + "Ignoring, as it is "
+ "likely it was deleted and we'll cover it by another "
+ "event with the type REMOVE.");
return;
}
DSOIdentifier id = new DSOIdentifier(dso, ctx);

// ignore unfinished submissions here. Every unfinished submission
// has an workspace item. The item flag "in_archive" doesn't help us
// here as this is also set to false if a newer version was submitted.
if (dso instanceof Item
&& WorkspaceItem.findByItem(ctx, (Item) dso) != null)
{
log.debug("Ignoring Item " + dso.getID() + " as a corresponding workspace item exists.");
return;
}

DSOIdentifier id = new DSOIdentifier(dso, ctx);
// If an item gets withdrawn, a MODIFIY event is fired. We have to
// delete the item from the triple store instead of converting it.
// we don't have to take care for reinstantions of items as they can

@@ -392,7 +392,7 @@ public class DSIndexer
{
setBatchProcessingMode(true);
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();

String usage = "org.dspace.search.DSIndexer [-cbhof[r <item handle>]] or nothing to update/clean an existing index.";
Options options = new Options();

@@ -26,13 +26,17 @@ import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.common.params.*;
import org.apache.solr.common.util.JavaBinCodec;
import org.dspace.content.*;
@@ -69,6 +73,8 @@ public class SolrLogger
{
private static final Logger log = Logger.getLogger(SolrLogger.class);

private static final String MULTIPLE_VALUES_SPLITTER = "|";

private static final HttpSolrServer solr;

public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
@@ -331,6 +337,7 @@ public class SolrLogger
{
doc1.addField("userAgent", request.getHeader("User-Agent"));
}
doc1.addField("isBot",isSpiderBot);
// Save the location information if valid, save the event without
// location information if not valid
if(locationService != null)
@@ -354,7 +361,7 @@ public class SolrLogger
doc1.addField("city", location.city);
doc1.addField("latitude", location.latitude);
doc1.addField("longitude", location.longitude);
doc1.addField("isBot",isSpiderBot);

}
@@ -416,6 +423,7 @@ public class SolrLogger
{
doc1.addField("userAgent", userAgent);
}
doc1.addField("isBot",isSpiderBot);
// Save the location information if valid, save the event without
// location information if not valid
if(locationService != null)
@@ -439,7 +447,7 @@ public class SolrLogger
doc1.addField("city", location.city);
doc1.addField("latitude", location.latitude);
doc1.addField("longitude", location.longitude);
doc1.addField("isBot",isSpiderBot);

}
@@ -1310,8 +1318,12 @@ public class SolrLogger
yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
yearQueryParams.put(CommonParams.WT, "csv");

//Tell SOLR how to escape and separate the values of multi-valued fields
yearQueryParams.put("csv.escape", "\\");
yearQueryParams.put("csv.mv.separator", MULTIPLE_VALUES_SPLITTER);

//Start by creating a new core
String coreName = "statistics-" + dcStart.getYear();
String coreName = "statistics-" + dcStart.getYearUTC();
HttpSolrServer statisticsYearServer = createCore(solr, coreName);

System.out.println("Moving: " + totalRecords + " into core " + coreName);
@@ -1326,7 +1338,7 @@ public class SolrLogger
HttpResponse response = new DefaultHttpClient().execute(get);
InputStream csvInputstream = response.getEntity().getContent();
//Write the csv ouput to a file !
File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear() + "." + i + ".csv");
File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYearUTC() + "." + i + ".csv");
FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
filesToUpload.add(csvFile);

@@ -1334,13 +1346,22 @@ public class SolrLogger
yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
}

Set<String> multivaluedFields = getMultivaluedFieldNames();

for (File tempCsv : filesToUpload) {
//Upload the data in the csv files to our new solr core
ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
contentStreamUpdateRequest.setParam("escape", "\\");
contentStreamUpdateRequest.setParam("skip", "_version_");
contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

//Add parsing directives for the multivalued fields so that they are stored as separate values instead of one value
for (String multivaluedField : multivaluedFields) {
contentStreamUpdateRequest.setParam("f." + multivaluedField + ".split", Boolean.TRUE.toString());
contentStreamUpdateRequest.setParam("f." + multivaluedField + ".separator", MULTIPLE_VALUES_SPLITTER);
}
statisticsYearServer.request(contentStreamUpdateRequest);
}
statisticsYearServer.commit(true, true);
@@ -1359,6 +1380,14 @@ public class SolrLogger
private static HttpSolrServer createCore(HttpSolrServer solr, String coreName) throws IOException, SolrServerException {
String solrDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" +File.separator;
String baseSolrUrl = solr.getBaseURL().replace("statistics", "");
HttpSolrServer returnServer = new HttpSolrServer(baseSolrUrl + "/" + coreName);
try {
SolrPingResponse ping = returnServer.ping();
log.debug(String.format("Ping of Solr Core [%s] Returned with Status [%d]", coreName, ping.getStatus()));
return returnServer;
} catch(Exception e) {
log.debug(String.format("Ping of Solr Core [%s] Failed with [%s]. New Core Will be Created", coreName, e.getClass().getName()));
}
CoreAdminRequest.Create create = new CoreAdminRequest.Create();
create.setCoreName(coreName);
create.setInstanceDir("statistics");
@@ -1366,10 +1395,35 @@ public class SolrLogger
HttpSolrServer solrServer = new HttpSolrServer(baseSolrUrl);
create.process(solrServer);
log.info("Created core with name: " + coreName);
return new HttpSolrServer(baseSolrUrl + "/" + coreName);
return returnServer;
}

/**
* Retrieves a list of all the multi valued fields in the solr core
* @return all fields tagged as multivalued
* @throws SolrServerException When getting the schema information from the SOLR core fails
* @throws IOException When connection to the SOLR server fails
*/
public static Set<String> getMultivaluedFieldNames() throws SolrServerException, IOException {
Set<String> multivaluedFields = new HashSet<String>();
LukeRequest lukeRequest = new LukeRequest();
lukeRequest.setShowSchema(true);
LukeResponse process = lukeRequest.process(solr);
Map<String, LukeResponse.FieldInfo> fields = process.getFieldInfo();
for(String fieldName : fields.keySet())
{
LukeResponse.FieldInfo fieldInfo = fields.get(fieldName);
EnumSet<FieldFlag> flags = fieldInfo.getFlags();
for(FieldFlag fieldFlag : flags)
{
if(fieldFlag.getAbbreviation() == FieldFlag.MULTI_VALUED.getAbbreviation())
{
multivaluedFields.add(fieldName);
}
}
}
return multivaluedFields;
}
public static void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
Context context = new Context();
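
Note: a sketch of the per-field directives generated by the loop above for one hypothetical multivalued field named "statistics_type"; the real names come from getMultivaluedFieldNames():

    // Export side: csv.escape=\ and csv.mv.separator=| flatten multiple values into "a|b|c"
    // Import side: these two directives split "a|b|c" back into separate stored values
    contentStreamUpdateRequest.setParam("f.statistics_type.split", "true");
    contentStreamUpdateRequest.setParam("f.statistics_type.separator", "|");
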
@@ -15,6 +15,7 @@ import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Collections;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import org.dspace.core.ConfigurationManager;
@@ -42,10 +43,10 @@ public class SpiderDetector {
private static IPTable table = null;

/** Collection of regular expressions to match known spiders' agents. */
private static List<Pattern> agents = new ArrayList<Pattern>();
private static List<Pattern> agents = Collections.synchronizedList(new ArrayList<Pattern>());

/** Collection of regular expressions to match known spiders' domain names. */
private static List<Pattern> domains = new ArrayList<Pattern>();
private static List<Pattern> domains = Collections.synchronizedList(new ArrayList<Pattern>());

/**
* Utility method which reads lines from a file & returns them in a Set.
@@ -199,10 +200,12 @@ public class SpiderDetector {
{
// See if any agent patterns match
if (null != agent)
{
synchronized(agents)
{
if (agents.isEmpty())
loadPatterns("agents", agents);

}
for (Pattern candidate : agents)
{
// prevent matcher() invocation from a null Pattern object
@@ -230,11 +233,11 @@ public class SpiderDetector {
// No. See if any DNS names match
if (null != hostname)
{
if (domains.isEmpty())
synchronized(domains)
{
if (domains.isEmpty())
loadPatterns("domains", domains);
}

for (Pattern candidate : domains)
{
// prevent matcher() invocation from a null Pattern object
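
Note: the SpiderDetector hunks above serialize the lazy pattern loading; a condensed sketch of the resulting idiom, with names as in the diff:

    // A synchronizedList guards individual calls, but check-then-load is a compound
    // action and therefore needs an explicit lock on the list itself
    private static List<Pattern> agents =
            Collections.synchronizedList(new ArrayList<Pattern>());

    synchronized (agents)
    {
        if (agents.isEmpty())               // only the first caller loads the patterns
            loadPatterns("agents", agents);
    }
    // after the one-time load the list is effectively read-only, so the
    // subsequent for-loop over it can run outside the lock
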
@@ -7,39 +7,21 @@
*/
package org.dspace.storage.rdbms;

import java.io.*;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.MigrationInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.naming.InitialContext;
import javax.sql.DataSource;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.sql.*;
import java.sql.Date;
import java.util.*;
import java.util.regex.Pattern;

/**
* Executes SQL queries.
*
@@ -343,7 +325,7 @@ public class DatabaseManager
try
{
iterator = query(context, query, parameters);
retRow = (!iterator.hasNext()) ? null : iterator.next();
retRow = (!iterator.hasNext()) ? null : iterator.next(context);
} catch (SQLException e) {
log.error("SQL query single Error - ", e);
throw e;
@@ -389,7 +371,7 @@ public class DatabaseManager

try
{
retRow = (!iterator.hasNext()) ? null : iterator.next();
retRow = (!iterator.hasNext()) ? null : iterator.next(context);
} catch (SQLException e) {
log.error("SQL query singleTable Error - ", e);
throw e;
@@ -475,7 +457,7 @@ public class DatabaseManager

{
try {
TableRow row = new TableRow(canonicalize(table), getColumnNames(table));
TableRow row = new TableRow(canonicalize(table), getColumnNames(context,table));
insert(context, row);
return row;
} catch (SQLException e) {
@@ -505,7 +487,7 @@ public class DatabaseManager
String ctable = canonicalize(table);

try {
return findByUnique(context, ctable, getPrimaryKeyColumn(ctable),
return findByUnique(context, ctable, getPrimaryKeyColumn(context, ctable),
Integer.valueOf(id));
} catch (SQLException e) {
log.error("SQL find Error - ", e);
@@ -571,7 +553,7 @@ public class DatabaseManager
{
try {
String ctable = canonicalize(table);
return deleteByValue(context, ctable, getPrimaryKeyColumn(ctable),
return deleteByValue(context, ctable, getPrimaryKeyColumn(context, ctable),
Integer.valueOf(id));
} catch (SQLException e) {
log.error("SQL delete Error - ", e);
@@ -691,6 +673,10 @@ public class DatabaseManager
* @return The newly created row
* @throws SQLException
*/
public static TableRow row(Context context, String table) throws SQLException
{
return new TableRow(canonicalize(table), getColumnNames(context, table));
}
public static TableRow row(String table) throws SQLException
{
return new TableRow(canonicalize(table), getColumnNames(table));
@@ -718,7 +704,7 @@ public class DatabaseManager
newID = doInsertGeneric(context, row);
}

row.setColumn(getPrimaryKeyColumn(row), newID);
row.setColumn(getPrimaryKeyColumn(context, row), newID);
}

/**
@@ -741,8 +727,8 @@ public class DatabaseManager
.append(" set ");

List<ColumnInfo> columns = new ArrayList<ColumnInfo>();
ColumnInfo pk = getPrimaryKeyColumnInfo(table);
Collection<ColumnInfo> info = getColumnInfo(table);
ColumnInfo pk = getPrimaryKeyColumnInfo(context, table);
Collection<ColumnInfo> info = getColumnInfo(context, table);

String separator = "";
for (ColumnInfo col : info)
@@ -789,7 +775,7 @@ public class DatabaseManager
throw new IllegalArgumentException("Row not associated with a table");
}

String pk = getPrimaryKeyColumn(row);
String pk = getPrimaryKeyColumn(context, row);

if (row.isColumnNull(pk))
{
@@ -808,9 +794,9 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
static Collection<ColumnInfo> getColumnInfo(String table) throws SQLException
static Collection<ColumnInfo> getColumnInfo(Context context, String table) throws SQLException
{
Map<String, ColumnInfo> cinfo = getColumnInfoInternal(table);
Map<String, ColumnInfo> cinfo = getColumnInfoInternal(context, table);

return (cinfo == null) ? null : cinfo.values();
}
@@ -826,14 +812,18 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
static ColumnInfo getColumnInfo(String table, String column)
static ColumnInfo getColumnInfo(Context context, String table, String column)
throws SQLException
{
Map<String, ColumnInfo> info = getColumnInfoInternal(table);
Map<String, ColumnInfo> info = getColumnInfoInternal(context, table);

return (info == null) ? null : info.get(column);
}

static List<String> getColumnNames(String table) throws SQLException{
return getColumnNames(null,table);
}

/**
* Return the names of all the columns of the given table.
*
@@ -844,10 +834,10 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
static List<String> getColumnNames(String table) throws SQLException
static List<String> getColumnNames(Context context, String table) throws SQLException
{
List<String> results = new ArrayList<String>();
Collection<ColumnInfo> info = getColumnInfo(table);
Collection<ColumnInfo> info = getColumnInfo(context, table);

for (ColumnInfo col : info)
{
@@ -881,22 +871,22 @@ public class DatabaseManager
}

/**
* Return the canonical name for a table.
* Return the canonical name for a database object.
*
* @param table
* The name of the table.
* @return The canonical name of the table.
* @param db_object
* The name of the database object.
* @return The canonical name of the database object.
*/
static String canonicalize(String table)
static String canonicalize(String db_object)
{
// Oracle expects upper-case table names
// Oracle expects upper-case table names, schemas, etc.
if (isOracle)
{
return (table == null) ? null : table.toUpperCase();
return (db_object == null) ? null : db_object.toUpperCase();
}

// default database postgres wants lower-case table names
return (table == null) ? null : table.toLowerCase();
return (db_object == null) ? null : db_object.toLowerCase();
}
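Editor's note: the rename from table to db_object widens canonicalize() to cover schemas and other object names, but its effect is still just case-folding per DB dialect. A standalone sketch of the same logic (the isOracle flag here is a stand-in for DatabaseManager's own field):

static boolean isOracle = false; // stand-in for DatabaseManager's flag

static String canonicalize(String dbObject)
{
    if (dbObject == null)
    {
        return null;
    }
    // Oracle folds unquoted identifiers to upper case; PostgreSQL to lower case.
    return isOracle ? dbObject.toUpperCase() : dbObject.toLowerCase();
}

// canonicalize("MetadataValue") -> "metadatavalue" on PostgreSQL, "METADATAVALUE" on Oracle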

////////////////////////////////////////
@@ -914,9 +904,21 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
static TableRow process(Context context, ResultSet results, String table) throws SQLException
{
return process(context,results, table, null);
}
static TableRow process(ResultSet results, String table) throws SQLException
{
return process(results, table, null);
return process(null,results, table, null);
}

/**
* @deprecated You should try to pass an existing database connection to this method to prevent opening a new one.
*/
@Deprecated
static TableRow process(ResultSet results, String table, List<String> pColumnNames) throws SQLException{
return process(null,results,table,pColumnNames);
}

/**
@@ -932,14 +934,14 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
static TableRow process(ResultSet results, String table, List<String> pColumnNames) throws SQLException
static TableRow process(Context context, ResultSet results, String table, List<String> pColumnNames) throws SQLException
{
ResultSetMetaData meta = results.getMetaData();
int columns = meta.getColumnCount() + 1;

// If we haven't been passed the column names try to generate them from the metadata / table
List<String> columnNames = pColumnNames != null ? pColumnNames :
((table == null) ? getColumnNames(meta) : getColumnNames(table));
((table == null) ? getColumnNames(meta) : getColumnNames(context,table));

TableRow row = new TableRow(canonicalize(table), columnNames);

@@ -1062,9 +1064,9 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
public static String getPrimaryKeyColumn(TableRow row) throws SQLException
public static String getPrimaryKeyColumn(Context context,TableRow row) throws SQLException
{
return getPrimaryKeyColumn(row.getTable());
return getPrimaryKeyColumn(context,row.getTable());
}

/**
@@ -1079,10 +1081,10 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
protected static String getPrimaryKeyColumn(String table)
protected static String getPrimaryKeyColumn(Context context, String table)
throws SQLException
{
ColumnInfo info = getPrimaryKeyColumnInfo(table);
ColumnInfo info = getPrimaryKeyColumnInfo(context, table);

return (info == null) ? null : info.getName();
}
@@ -1098,9 +1100,9 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
static ColumnInfo getPrimaryKeyColumnInfo(String table) throws SQLException
static ColumnInfo getPrimaryKeyColumnInfo(Context context, String table) throws SQLException
{
Collection<ColumnInfo> cinfo = getColumnInfo(canonicalize(table));
Collection<ColumnInfo> cinfo = getColumnInfo(context, canonicalize(table));

for (ColumnInfo info : cinfo)
{
@@ -1202,7 +1204,7 @@ public class DatabaseManager
* @exception SQLException
* If a database error occurs
*/
private static Map<String, ColumnInfo> getColumnInfoInternal(String table) throws SQLException
private static Map<String, ColumnInfo> getColumnInfoInternal(Context context, String table) throws SQLException
{
String ctable = canonicalize(table);
Map<String, ColumnInfo> results = info.get(ctable);
@@ -1212,7 +1214,7 @@ public class DatabaseManager
return results;
}

results = retrieveColumnInfo(ctable);
results = retrieveColumnInfo(context, ctable);
info.put(ctable, results);

return results;
@@ -1229,7 +1231,7 @@ public class DatabaseManager
* If there is a problem retrieving information from the
* RDBMS.
*/
private static Map<String, ColumnInfo> retrieveColumnInfo(String table) throws SQLException
private static Map<String, ColumnInfo> retrieveColumnInfo(Context context, String table) throws SQLException
{
Connection connection = null;
ResultSet pkcolumns = null;
@@ -1237,10 +1239,6 @@ public class DatabaseManager

try
{
String schema = ConfigurationManager.getProperty("db.schema");
if(StringUtils.isBlank(schema)){
schema = null;
}
String catalog = null;

int dotIndex = table.indexOf('.');
@@ -1252,7 +1250,14 @@ public class DatabaseManager
log.warn("table: " + table);
}

if (context != null && !context.getDBConnection().isClosed()) {
connection = context.getDBConnection();
} else {
connection = getConnection();
}

// Get current database schema name
String schema = DatabaseUtils.getSchemaName(connection);

DatabaseMetaData metadata = connection.getMetaData();
Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>();
@@ -1301,7 +1306,7 @@ public class DatabaseManager
try { columns.close(); } catch (SQLException sqle) { }
}

if (connection != null)
if (connection != null && context == null) // Only close if connection is newly created in this method
{
try { connection.close(); } catch (SQLException sqle) { }
}
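Editor's note: the hunk above is the heart of the Context-passing change: reuse the caller's open connection when one is available, otherwise open a private one, and close only what this method itself opened. A compact sketch of that ownership rule (openNewConnection is a hypothetical stand-in for DatabaseManager.getConnection()):

import java.sql.Connection;
import java.sql.SQLException;

class ConnectionOwnershipSketch
{
    static void withMetadata(Connection shared) throws SQLException
    {
        Connection connection;
        boolean ownsConnection = false;       // track who opened it
        if (shared != null && !shared.isClosed())
        {
            connection = shared;              // reuse the caller's connection
        }
        else
        {
            connection = openNewConnection(); // hypothetical helper
            ownsConnection = true;
        }
        try
        {
            connection.getMetaData();         // ... do the metadata work ...
        }
        finally
        {
            if (ownsConnection)               // never close a borrowed connection
            {
                connection.close();
            }
        }
    }

    static Connection openNewConnection() throws SQLException
    {
        // stand-in for DatabaseManager.getConnection(); not implemented here
        throw new UnsupportedOperationException("illustrative stub");
    }
}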
@@ -1674,10 +1679,10 @@ public class DatabaseManager
{
String table = row.getTable();

Collection<ColumnInfo> info = getColumnInfo(table);
Collection<ColumnInfo> info = getColumnInfo(context, table);
Collection<ColumnInfo> params = new ArrayList<ColumnInfo>();

String primaryKey = getPrimaryKeyColumn(table);
String primaryKey = getPrimaryKeyColumn(context, table);
String sql = insertSQL.get(table);

boolean firstColumn = true;
@@ -1713,7 +1718,7 @@ public class DatabaseManager
}
}

sql = insertBuilder.append(valuesBuilder.toString()).append(") RETURNING ").append(getPrimaryKeyColumn(table)).toString();
sql = insertBuilder.append(valuesBuilder.toString()).append(") RETURNING ").append(getPrimaryKeyColumn(context, table)).toString();
insertSQL.put(table, sql);
}
else
@@ -1828,8 +1833,8 @@ public class DatabaseManager
}

// Set the ID in the table row object
row.setColumn(getPrimaryKeyColumn(table), newID);
Collection<ColumnInfo> info = getColumnInfo(table);
row.setColumn(getPrimaryKeyColumn(context, table), newID);
Collection<ColumnInfo> info = getColumnInfo(context, table);

String sql = insertSQL.get(table);
if (sql == null)
|
@@ -17,6 +17,7 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Locale;
import javax.sql.DataSource;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
@@ -60,6 +61,11 @@ public class DatabaseUtils
File.separator + "conf" +
File.separator + "reindex.flag";

// Types of databases supported by DSpace. See getDbType()
public static final String DBMS_POSTGRES="postgres";
public static final String DBMS_ORACLE="oracle";
public static final String DBMS_H2="h2";

/**
* Commandline tools for managing database changes, etc.
* @param argv
@@ -873,8 +879,10 @@ public class DatabaseUtils
* Get the Database Schema Name in use by this Connection, so that it can
* be used to limit queries in other methods (e.g. tableExists()).
* <P>
* For PostgreSQL, schema is simply what is configured in db.schema or "public"
* For Oracle, schema is actually the database *USER* or owner.
* NOTE: Once we upgrade to using Apache Commons DBCP / Pool version 2.0,
* this method WILL BE REMOVED in favor of java.sql.Connection's new
* "getSchema()" method.
* http://docs.oracle.com/javase/7/docs/api/java/sql/Connection.html#getSchema()
*
* @param connection
* Current Database Connection
@@ -886,27 +894,29 @@ public class DatabaseUtils
String schema = null;
DatabaseMetaData meta = connection.getMetaData();

// Determine our DB type
// Check the configured "db.schema" FIRST for the value configured there
schema = DatabaseManager.canonicalize(ConfigurationManager.getProperty("db.schema"));

// If unspecified, determine "sane" defaults based on DB type
if(StringUtils.isBlank(schema))
{
String dbType = DatabaseManager.findDbKeyword(meta);

if(dbType.equals(DatabaseManager.DBMS_POSTGRES))
{
// Get the schema name from "db.schema"
schema = ConfigurationManager.getProperty("db.schema");

// If unspecified, default schema is "public"
if(StringUtils.isBlank(schema)){
// For PostgreSQL, the default schema is named "public"
// See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html
schema = "public";
}
}
else if (dbType.equals(DatabaseManager.DBMS_ORACLE))
{
// Schema is actually the user account
// For Oracle, default schema is actually the user account
// See: http://stackoverflow.com/a/13341390
schema = meta.getUserName();
}
else
schema = null;
}

return schema;
}
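Editor's note: a hedged sketch of how the resolved schema name is typically fed back into JDBC metadata lookups, mirroring what retrieveColumnInfo() does above (the table name is illustrative):

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

class SchemaLookupSketch
{
    static void listColumns(Connection connection) throws SQLException
    {
        // Schema resolution as above: db.schema if configured, else "public"
        // on PostgreSQL or the connected user account on Oracle.
        String schema = DatabaseUtils.getSchemaName(connection);
        DatabaseMetaData meta = connection.getMetaData();
        // Limiting by schema avoids picking up same-named tables elsewhere.
        try (ResultSet columns = meta.getColumns(null, schema, "metadatavalue", null))
        {
            while (columns.next())
            {
                System.out.println(columns.getString("COLUMN_NAME"));
            }
        }
    }
}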
@@ -1031,6 +1041,37 @@ public class DatabaseUtils
}
}

/**
* Determine the type of Database, based on the DB connection.
*
* @param connection current DB Connection
* @return a DB keyword/type (see DatabaseUtils.DBMS_* constants)
* @throws SQLException if database error
*/
public static String getDbType(Connection connection)
throws SQLException
{
DatabaseMetaData meta = connection.getMetaData();
String prodName = meta.getDatabaseProductName();
String dbms_lc = prodName.toLowerCase(Locale.ROOT);
if (dbms_lc.contains("postgresql"))
{
return DBMS_POSTGRES;
}
else if (dbms_lc.contains("oracle"))
{
return DBMS_ORACLE;
}
else if (dbms_lc.contains("h2")) // Used for unit testing only
{
return DBMS_H2;
}
else
{
return dbms_lc;
}
}
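Editor's note: a short usage sketch for the new helper. Callers branch on the returned constant rather than parsing the JDBC product name themselves; the SQL strings below are illustrative only:

import java.sql.Connection;
import java.sql.SQLException;

class DbTypeSketch
{
    static String limitClause(Connection connection, String sql, int limit) throws SQLException
    {
        String dbType = DatabaseUtils.getDbType(connection);
        if (DatabaseUtils.DBMS_POSTGRES.equals(dbType) || DatabaseUtils.DBMS_H2.equals(dbType))
        {
            return sql + " LIMIT " + limit;
        }
        else if (DatabaseUtils.DBMS_ORACLE.equals(dbType))
        {
            return "SELECT * FROM (" + sql + ") WHERE ROWNUM <= " + limit;
        }
        // For an unrecognized DBMS, getDbType() returns the lower-cased product name.
        throw new SQLException("Unsupported DBMS: " + dbType);
    }
}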

/**
* Internal class to actually perform re-indexing in a separate thread.
* (See checkReindexDiscovery() method.)
@@ -1065,7 +1106,7 @@ public class DatabaseUtils
try
{
context = new Context();

context.turnOffAuthorisationSystem();
log.info("Post database migration, reindexing all content in Discovery search and browse engine");

// Reindex Discovery completely
@@ -23,18 +23,17 @@ import org.apache.commons.lang.StringUtils;
public class MigrationUtils
{
/**
* Drop a given Database Constraint (based on the current database type).
* Drop a given Database Column Constraint (based on the current database type).
* Returns a "checksum" for this migration which can be used as part of
* a Flyway Java migration
*
* @param connection the current Database connection
* @param tableName the name of the table the constraint applies to
* @param columnName the name of the column the constraint applies to
* @param constraintSuffix Only used for PostgreSQL, whose constraint naming convention depends on a suffix (key, fkey, etc)
* @return migration checksum as an Integer
* @throws SQLException if a database error occurs
*/
public static Integer dropDBConstraint(Connection connection, String tableName, String columnName, String constraintSuffix)
public static Integer dropDBConstraint(Connection connection, String tableName, String columnName)
throws SQLException
{
Integer checksum = -1;
@@ -48,13 +47,17 @@ public class MigrationUtils
String dbtype = DatabaseManager.findDbKeyword(meta);
String constraintName = null;
String constraintNameSQL = null;
String schemaName = null;
switch(dbtype)
{
case DatabaseManager.DBMS_POSTGRES:
// In Postgres, constraints are always named:
// {tablename}_{columnname(s)}_{suffix}
// see: http://stackoverflow.com/a/4108266/3750035
constraintName = StringUtils.lowerCase(tableName) + "_" + StringUtils.lowerCase(columnName) + "_" + StringUtils.lowerCase(constraintSuffix);
// In Postgres, column constraints are listed in the "information_schema.key_column_usage" view
// See: http://www.postgresql.org/docs/9.4/static/infoschema-key-column-usage.html
constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " +
"FROM information_schema.key_column_usage " +
"WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ?";
// For Postgres, we need to limit by the schema as well
schemaName = DatabaseUtils.getSchemaName(connection);
break;
case DatabaseManager.DBMS_ORACLE:
// In Oracle, constraints are listed in the USER_CONS_COLUMNS table
@@ -72,13 +75,15 @@ public class MigrationUtils
throw new SQLException("DBMS " + dbtype + " is unsupported in this migration.");
}

// If we have a SQL query to run for the constraint name, then run it
if (constraintNameSQL!=null)
{
// Run the query to obtain the constraint name, passing it the parameters
PreparedStatement statement = connection.prepareStatement(constraintNameSQL);
statement.setString(1, StringUtils.upperCase(tableName));
statement.setString(2, StringUtils.upperCase(columnName));
statement.setString(1, DatabaseUtils.canonicalize(connection, tableName));
statement.setString(2, DatabaseUtils.canonicalize(connection, columnName));
// Also limit by database schema, if a schemaName has been set (only needed for PostgreSQL)
if(schemaName!=null && !schemaName.isEmpty())
{
statement.setString(3, DatabaseUtils.canonicalize(connection, schemaName));
}
try
{
ResultSet results = statement.executeQuery();
@@ -92,15 +97,24 @@ public class MigrationUtils
{
statement.close();
}
}

// As long as we have a constraint name, drop it
if (constraintName!=null && !constraintName.isEmpty())
{
// This drop constraint SQL should be the same in all databases
String dropConstraintSQL = "ALTER TABLE " + tableName + " DROP CONSTRAINT " + constraintName;
// Canonicalize the constraintName
constraintName = DatabaseUtils.canonicalize(connection, constraintName);
// If constraintName starts with a $, surround with double quotes
// (This is mostly for PostgreSQL, which sometimes names constraints $1, $2, etc)
if(constraintName.startsWith("$"))
{
constraintName = "\"" + constraintName + "\"";
}

PreparedStatement statement = connection.prepareStatement(dropConstraintSQL);
// This drop constraint SQL should be the same in all databases
String dropConstraintSQL = "ALTER TABLE " + DatabaseUtils.canonicalize(connection, tableName) +
" DROP CONSTRAINT " + constraintName;

statement = connection.prepareStatement(dropConstraintSQL);
try
{
statement.execute();
|
@@ -8,6 +8,7 @@
package org.dspace.storage.rdbms;

import org.apache.log4j.Logger;
import org.dspace.core.Context;

import java.sql.ResultSet;
import java.sql.SQLException;
@@ -133,7 +134,9 @@ public class TableRowIterator
* @return - The next row, or null if no more rows
* @exception SQLException -
* If a database error occurs while fetching values
* @deprecated use {@link #next(org.dspace.core.Context)} instead. Pass an existing database connection to this method to prevent opening a new one.
*/
@Deprecated
public TableRow next() throws SQLException
{
if (results == null)
@@ -151,6 +154,32 @@ public class TableRowIterator
return DatabaseManager.process(results, table, columnNames);
}

/**
* Advance to the next row and return it. Returns null if there are no more
* rows.
*
* @param context An existing database connection to reuse
* @return - The next row, or null if no more rows
* @exception SQLException -
* If a database error occurs while fetching values
*/
public TableRow next(Context context) throws SQLException
{
if (results == null)
{
return null;
}

if (!hasNext())
{
return null;
}

hasAdvanced = false;

return DatabaseManager.process(context, results, table, columnNames);
}

/**
* Return true if there are more rows, false otherwise
*
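Editor's note: with the new overload, a caller iterates as below so each row is processed on the caller's own connection instead of a freshly opened one. The table and query are illustrative; queryTable() and close() are existing DatabaseManager/TableRowIterator API:

import java.sql.SQLException;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;

class IteratorUsageSketch
{
    static void listRows(Context context) throws SQLException
    {
        TableRowIterator iterator = DatabaseManager.queryTable(context, "metadatavalue",
                "SELECT * FROM metadatavalue");
        try
        {
            while (iterator.hasNext())
            {
                TableRow row = iterator.next(context); // reuses the Context's connection
                // ... work with row ...
            }
        }
        finally
        {
            iterator.close();
        }
    }
}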
@@ -13,8 +13,6 @@ import java.sql.SQLException;
import org.dspace.storage.rdbms.MigrationUtils;
import org.flywaydb.core.api.migration.MigrationChecksumProvider;
import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* This class is in support of the "V1.4__Upgrade_to_DSpace_1.4_schema.sql"
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
implements JdbcMigration, MigrationChecksumProvider
{
/** logging category */
private static final Logger log = LoggerFactory.getLogger(V1_3_9__Drop_constraint_for_DSpace_1_4_schema.class);

/* The checksum to report for this migration (when successful) */
private int checksum = -1;

@@ -57,7 +52,7 @@ public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
throws IOException, SQLException
{
// Drop the constraint associated with "name" column of "community"
checksum = MigrationUtils.dropDBConstraint(connection, "community", "name", "key");
checksum = MigrationUtils.dropDBConstraint(connection, "community", "name");
}

/**
@@ -13,8 +13,6 @@ import java.sql.SQLException;
import org.dspace.storage.rdbms.MigrationUtils;
import org.flywaydb.core.api.migration.MigrationChecksumProvider;
import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* This class is in support of the "V1.6__Upgrade_to_DSpace_1.6_schema.sql"
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
implements JdbcMigration, MigrationChecksumProvider
{
/** logging category */
private static final Logger log = LoggerFactory.getLogger(V1_5_9__Drop_constraint_for_DSpace_1_6_schema.class);

/* The checksum to report for this migration (when successful) */
private int checksum = -1;

@@ -57,11 +52,11 @@ public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
throws IOException, SQLException
{
// Drop the constraint associated with "collection_id" column of "community2collection" table
int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id", "fkey");
int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id");
// Drop the constraint associated with "child_comm_id" column of "community2community" table
int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id", "fkey");
int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id");
// Drop the constraint associated with "item_id" column of "collection2item" table
int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id", "fkey");
int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id");

// Checksum will just be the sum of those three return values
checksum = return1 + return2 + return3;
@@ -13,8 +13,6 @@ import java.sql.SQLException;
import org.dspace.storage.rdbms.MigrationUtils;
import org.flywaydb.core.api.migration.MigrationChecksumProvider;
import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* This class is in support of the DS-1582 Metadata for All Objects feature.
@@ -43,9 +41,6 @@ import org.slf4j.LoggerFactory;
public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint
implements JdbcMigration, MigrationChecksumProvider
{
/** logging category */
private static final Logger log = LoggerFactory.getLogger(V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.class);

/* The checksum to report for this migration (when successful) */
private int checksum = -1;

@@ -58,7 +53,7 @@ public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint
throws IOException, SQLException
{
// Drop the constraint associated with "item_id" column of "metadatavalue"
checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id", "fkey");
checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id");
}

/**
@@ -0,0 +1,58 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms.migration;

import org.dspace.core.Constants;
import org.dspace.storage.rdbms.DatabaseUtils;
import org.flywaydb.core.api.migration.MigrationChecksumProvider;
import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
import org.flywaydb.core.internal.util.scanner.classpath.ClassPathResource;

import java.sql.Connection;

public class V5_7_2017_05_05__DS_3431_Add_Policies_for_BasicWorkflow
implements JdbcMigration, MigrationChecksumProvider
{

// Size of migration script run
Integer migration_file_size = -1;


@Override
public void migrate(Connection connection) throws Exception
{
// Based on type of DB, get path to SQL migration script
String dbtype = DatabaseUtils.getDbType(connection);

String dataMigrateSQL;
String sqlMigrationPath = "org/dspace/storage/rdbms/sqlmigration/workflow/" + dbtype +"/";
// Now, check if the XMLWorkflow table (cwf_workflowitem) already exists in this database
// If XMLWorkflow Table does NOT exist in this database, then lets do the migration!
// If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql scripts
if (DatabaseUtils.tableExists(connection, "cwf_workflowitem"))
{
return;
}else{
//Migrate the basic workflow
// Get the contents of our data migration script, based on path & DB type
dataMigrateSQL = new ClassPathResource(sqlMigrationPath + "basicWorkflow" + "/V5.7_2017.05.05__DS-3431.sql",
getClass().getClassLoader()).loadAsString(Constants.DEFAULT_ENCODING);
}

// Actually execute the Data migration SQL
// This adds the resource policies needed by the existing basic (traditional) workflow
DatabaseUtils.executeSql(connection, dataMigrateSQL);
migration_file_size = dataMigrateSQL.length();

}

@Override
public Integer getChecksum() {
return migration_file_size;
}
}
@@ -98,7 +98,7 @@ public class PubmedService

public List<Record> search(String query) throws IOException, HttpException
{
List<Record> results = null;
List<Record> results = new ArrayList<>();
if (!ConfigurationManager.getBooleanProperty(SubmissionLookupService.CFG_MODULE, "remoteservice.demo"))
{
HttpGet method = null;
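Editor's note: this one-line fix replaces a possible null return with an empty list, which spares every caller a null check. Illustrative caller code (the construction of PubmedService here is assumed, not taken from the DSpace source):

PubmedService pubmedService = new PubmedService(); // illustrative construction

// Before the fix, callers had to guard against null:
List<Record> records = pubmedService.search("dspace");
if (records != null)
{
    for (Record record : records) { /* ... */ }
}

// After the fix, an empty list means "no results" and the loop runs zero times:
for (Record record : pubmedService.search("dspace")) { /* ... */ }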
|
@@ -7,31 +7,28 @@
*/
package org.dspace.submit.step;

import java.util.ArrayList;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.Map;
import java.util.HashMap;

import javax.servlet.http.HttpSession;

import org.apache.log4j.Logger;

import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.license.CreativeCommons;
import org.dspace.license.CCLookup;
import org.dspace.submit.AbstractProcessingStep;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.SQLException;
import javax.servlet.http.HttpSession;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.license.CCLookup;
import org.dspace.license.CreativeCommons;
import org.dspace.submit.AbstractProcessingStep;

/**
* CCLicense step for DSpace Submission Process.
@@ -100,11 +97,6 @@ public class CCLicenseStep extends AbstractProcessingStep
session.setAttribute("inProgress", "TRUE");
// check what submit button was pressed in User Interface
String buttonPressed = Util.getSubmitButton(request, NEXT_BUTTON);
if ("submit_grant".equalsIgnoreCase(buttonPressed)
|| "submit_no_cc".equalsIgnoreCase(buttonPressed))
{
return processCC(context, request, response, subInfo);
}
String choiceButton = Util.getSubmitButton(request, SELECT_CHANGE);
Enumeration e = request.getParameterNames();
String isFieldRequired = "FALSE";
@@ -122,27 +114,10 @@ public class CCLicenseStep extends AbstractProcessingStep
{
Item item = subInfo.getSubmissionItem().getItem();
CreativeCommons.MdField uriField = CreativeCommons.getCCField("uri");
CreativeCommons.MdField nameField = CreativeCommons.getCCField("name");
String licenseUri = uriField.ccItemValue(item);
if (licenseUri != null)
//if (CreativeCommons.hasLicense(item, "dc", "rights", "uri", Item.ANY)
// && !CreativeCommons.getRightsURI(item, "dc", "rights", "uri", Item.ANY).equals(""))
{
//CreativeCommons.setItemMetadata(item, licenseURI, "dc", "rights", "uri", ConfigurationManager.getProperty("default.locale"));
uriField.removeItemValue(item, licenseUri);
if (ConfigurationManager.getBooleanProperty("cc.submit.setname"))
{
String licenseName = nameField.keyedItemValue(item, licenseUri);
nameField.removeItemValue(item, licenseName);
//CreativeCommons.setItemMetadata(item, CreativeCommons.getRightsName(item, "dc", "rights", null, Item.ANY), "dc", "rights", null, ConfigurationManager.getProperty("default.locale"));
}
if (ConfigurationManager.getBooleanProperty("cc.submit.addBitstream"))
{
CreativeCommons.removeLicense(context, item);
}
removeRequiredAttributes(session);
item.update();
context.commit();
}
return STATUS_COMPLETE;
}
@@ -152,7 +127,7 @@ public class CCLicenseStep extends AbstractProcessingStep
}
if (buttonPressed.equals(NEXT_BUTTON) || buttonPressed.equals(CANCEL_BUTTON) )
{
return processCCWS(context, request, response, subInfo);
return processCC(context, request, response, subInfo);
}
else
{
@@ -162,54 +137,6 @@ public class CCLicenseStep extends AbstractProcessingStep
}
}

/**
* Process the input from the CC license page
*
* @param context
* current DSpace context
* @param request
* current servlet request object
* @param response
* current servlet response object
* @param subInfo
* submission info object
*
* @return Status or error flag which will be processed by
* doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
* no errors occurred!)
*/
protected int processCC(Context context, HttpServletRequest request,
HttpServletResponse response, SubmissionInfo subInfo)
throws ServletException, IOException, SQLException,
AuthorizeException
{
String buttonPressed = Util.getSubmitButton(request, NEXT_BUTTON);

// RLR hack - need to distinguish between progress bar real submission
// (if cc_license_url exists, then the user has accepted the CC License)
String ccLicenseUrl = request.getParameter("cc_license_url");

if (buttonPressed.equals("submit_no_cc"))
{
// Skipping the CC license - remove any existing license selection
CreativeCommons.removeLicense(context, subInfo.getSubmissionItem()
.getItem());
}
else if ((ccLicenseUrl != null) && (ccLicenseUrl.length() > 0))
{
Item item = subInfo.getSubmissionItem().getItem();

// save the CC license
CreativeCommons.setLicense(context, item, ccLicenseUrl);
}

// commit changes
context.commit();

// completed without errors
return STATUS_COMPLETE;
}


/**
* Process the input from the CC license page using CC Web service
@@ -228,12 +155,11 @@ public class CCLicenseStep extends AbstractProcessingStep
* doPostProcessing() below! (if STATUS_COMPLETE or 0 is returned,
* no errors occurred!)
*/
protected int processCCWS(Context context, HttpServletRequest request,
protected int processCC(Context context, HttpServletRequest request,
HttpServletResponse response, SubmissionInfo subInfo)
throws ServletException, IOException, SQLException,
AuthorizeException {

String ccLicenseUrl = request.getParameter("cc_license_url");
HttpSession session = request.getSession();
Map<String, String> map = new HashMap<String, String>();
String licenseclass = (request.getParameter("licenseclass_chooser") != null) ? request.getParameter("licenseclass_chooser") : "";
@@ -245,39 +171,32 @@ public class CCLicenseStep extends AbstractProcessingStep
map.put("sampling", request.getParameter("sampling_chooser"));
}
map.put("jurisdiction", jurisdiction);
CCLookup ccLookup = new CCLookup();

CreativeCommons.MdField uriField = CreativeCommons.getCCField("uri");
CreativeCommons.MdField nameField = CreativeCommons.getCCField("name");
ccLookup.issue(licenseclass, map, ConfigurationManager.getProperty("cc.license.locale"));
Item item = subInfo.getSubmissionItem().getItem();
if (licenseclass.equals("xmlui.Submission.submit.CCLicenseStep.no_license"))
if ("webui.Submission.submit.CCLicenseStep.no_license".equals(licenseclass) || "xmlui.Submission.submit.CCLicenseStep.no_license".equals(licenseclass))
{
// only remove any previous licenses
String licenseUri = uriField.ccItemValue(item);
if (licenseUri != null) {
uriField.removeItemValue(item, licenseUri);
if (ConfigurationManager.getBooleanProperty("cc.submit.setname"))
{
String licenseName = nameField.keyedItemValue(item, licenseUri);
nameField.removeItemValue(item, licenseName);
}
if (ConfigurationManager.getBooleanProperty("cc.submit.addBitstream"))
{
CreativeCommons.removeLicense(context, item);
}
CreativeCommons.removeLicense(context, uriField, nameField, item);

item.update();
context.commit();
removeRequiredAttributes(session);
}

return STATUS_COMPLETE;
}
else if (licenseclass.equals("xmlui.Submission.submit.CCLicenseStep.select_change"))
else if (StringUtils.isBlank(licenseclass) || "webui.Submission.submit.CCLicenseStep.select_change".equals(licenseclass) || "xmlui.Submission.submit.CCLicenseStep.select_change".equals(licenseclass))
{
removeRequiredAttributes(session);
return STATUS_COMPLETE;
}
else if (ccLookup.isSuccess())

CCLookup ccLookup = new CCLookup();
ccLookup.issue(licenseclass, map, ConfigurationManager.getProperty("cc.license.locale"));
if (ccLookup.isSuccess())
{
CreativeCommons.removeLicense(context, uriField, nameField, item);

uriField.addItemValue(item, ccLookup.getLicenseUrl());
if (ConfigurationManager.getBooleanProperty("cc.submit.addbitstream")) {
CreativeCommons.setLicenseRDF(context, item, ccLookup.getRdf());
@@ -285,6 +204,7 @@ public class CCLicenseStep extends AbstractProcessingStep
if (ConfigurationManager.getBooleanProperty("cc.submit.setname")) {
nameField.addItemValue(item, ccLookup.getLicenseName());
}

item.update();
context.commit();
removeRequiredAttributes(session);
@@ -303,6 +223,7 @@ public class CCLicenseStep extends AbstractProcessingStep
return STATUS_COMPLETE;
}


private void removeRequiredAttributes(HttpSession session) {
session.removeAttribute("ccError");
session.removeAttribute("isFieldRequired");
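Editor's note: the rewritten conditions above put the string literal first. This is the standard null-safe equals idiom, and it matters here because the licenseclass parameter can be absent from the request. A minimal illustration (values are illustrative):

String licenseclass = null; // e.g. the request parameter is missing

// Throws NullPointerException:
// boolean match = licenseclass.equals("xmlui.Submission.submit.CCLicenseStep.no_license");

// Null-safe, as in the diff: a null licenseclass simply doesn't match.
boolean match = "webui.Submission.submit.CCLicenseStep.no_license".equals(licenseclass)
        || "xmlui.Submission.submit.CCLicenseStep.no_license".equals(licenseclass);

// StringUtils.isBlank(licenseclass), also added above, likewise tolerates null.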
|
@@ -11,13 +11,16 @@ import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;

import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
@@ -26,8 +29,8 @@ import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.curate.Curator;
import org.dspace.submit.AbstractProcessingStep;

@@ -261,6 +264,44 @@ public class UploadStep extends AbstractProcessingStep
// -------------------------------------------------
// Step #3: Check for a change in file description
// -------------------------------------------------
// We have to check for descriptions from users using the resumable upload
// and from users using the simple upload.
// Beginning with the resumable ones.
Enumeration<String> parameterNames = request.getParameterNames();
Map<String, String> descriptions = new HashMap<String, String>();
while (parameterNames.hasMoreElements())
{
String name = parameterNames.nextElement();
if (StringUtils.startsWithIgnoreCase(name, "description["))
{
descriptions.put(
name.substring("description[".length(), name.length()-1),
request.getParameter(name));
}
}
if (!descriptions.isEmpty())
{
// we got descriptions from the resumable upload
if (item != null)
{
Bundle[] bundles = item.getBundles("ORIGINAL");
for (Bundle bundle : bundles)
{
Bitstream[] bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams)
{
if (descriptions.containsKey(bitstream.getName()))
{
bitstream.setDescription(descriptions.get(bitstream.getName()));
bitstream.update();
}
}
}
}
return STATUS_COMPLETE;
}

// Going on with descriptions from the simple upload
String fileDescription = request.getParameter("description");

if (fileDescription != null && fileDescription.length() > 0)
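Editor's note: the loop above maps resumable-upload parameters of the form description[<filename>] to their values; the substring call strips the prefix and the trailing bracket. Walking one illustrative parameter through it:

String name = "description[thesis.pdf]";                  // illustrative parameter name
String fileName = name.substring("description[".length(), // start just after the prefix...
        name.length() - 1);                               // ...stop before the closing ']'
// fileName is now "thesis.pdf", the key used to find the matching bitstream by name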
|
@@ -10,6 +10,11 @@ package org.dspace.submit.step;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Date;
import java.util.Enumeration;

@@ -18,6 +23,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.util.SubmissionInfo;
import org.dspace.app.util.Util;
@@ -229,6 +235,44 @@ public class UploadWithEmbargoStep extends UploadStep
// -------------------------------------------------
// Step #3: Check for a change in file description
// -------------------------------------------------
// We have to check for descriptions from users using the resumable upload
// and from users using the simple upload.
// Beginning with the resumable ones.
Enumeration<String> parameterNames = request.getParameterNames();
Map<String, String> descriptions = new HashMap<String, String>();
while (parameterNames.hasMoreElements())
{
String name = parameterNames.nextElement();
if (StringUtils.startsWithIgnoreCase(name, "description["))
{
descriptions.put(
name.substring("description[".length(), name.length()-1),
request.getParameter(name));
}
}
if (!descriptions.isEmpty())
{
// we got descriptions from the resumable upload
if (item != null)
{
Bundle[] bundles = item.getBundles("ORIGINAL");
for (Bundle bundle : bundles)
{
Bitstream[] bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams)
{
if (descriptions.containsKey(bitstream.getName()))
{
bitstream.setDescription(descriptions.get(bitstream.getName()));
bitstream.update();
}
}
}
}
return STATUS_COMPLETE;
}

// Going on with descriptions from the simple upload
String fileDescription = request.getParameter("description");

if (fileDescription != null && fileDescription.length() > 0)
@@ -352,7 +396,7 @@ public class UploadWithEmbargoStep extends UploadStep
String fileDescription = (String) request.getAttribute(param + "-description");
if(fileDescription==null ||fileDescription.length()==0)
{
request.getParameter("description");
fileDescription = request.getParameter("description");
}

// if information wasn't passed by User Interface, we had a problem
||||
|

dspace-api/src/main/java/org/dspace/util/SolrImportExport.java (new file, 747 lines)
@@ -0,0 +1,747 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;

import org.apache.commons.cli.*;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.CoreAdminResponse;
import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.FacetParams;
import org.dspace.core.ConfigurationManager;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URL;
import java.nio.file.FileStore;
import java.text.*;
import java.util.*;

/**
* Utility class to export, clear and import Solr indexes.
* @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
*/
public class SolrImportExport
{

private static final DateFormat SOLR_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
private static final DateFormat SOLR_DATE_FORMAT_NO_MS = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
private static final DateFormat EXPORT_DATE_FORMAT = new SimpleDateFormat("yyyy-MM");
private static final String EXPORT_SEP = "_export_";

static
{
SOLR_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
EXPORT_DATE_FORMAT.setTimeZone(TimeZone.getDefault());
}

private static final String ACTION_OPTION = "a";
private static final String CLEAR_OPTION = "c";
private static final String OVERWRITE_OPTION = "f";
private static final String DIRECTORY_OPTION = "d";
private static final String HELP_OPTION = "h";
private static final String INDEX_NAME_OPTION = "i";
private static final String KEEP_OPTION = "k";
private static final String LAST_OPTION = "l";

public static final int ROWS_PER_FILE = 10_000;

private static final String MULTIPLE_VALUES_SPLITTER = ",";

private static final Logger log = Logger.getLogger(SolrImportExport.class);

/**
* Entry point for command-line invocation
* @param args command-line arguments; see help for description
* @throws ParseException if the command-line arguments cannot be parsed
*/
public static void main(String[] args) throws ParseException
{
CommandLineParser parser = new PosixParser();
Options options = makeOptions();

try
{
CommandLine line = parser.parse(options, args);
if (line.hasOption(HELP_OPTION))
{
printHelpAndExit(options, 0);
}

String[] indexNames = {"statistics"};
if (line.hasOption(INDEX_NAME_OPTION))
{
indexNames = line.getOptionValues(INDEX_NAME_OPTION);
}
else
{
System.err.println("No index name provided, defaulting to \"statistics\".");
}

String directoryName = makeDirectoryName(line.getOptionValue(DIRECTORY_OPTION));

String action = line.getOptionValue(ACTION_OPTION, "export");
if ("import".equals(action))
{
for (String indexName : indexNames)
{
File importDir = new File(directoryName);
if (!importDir.exists() || !importDir.canRead())
{
System.err.println("Import directory " + directoryName
+ " doesn't exist or is not readable by the current user. Not importing index "
+ indexName);
continue; // skip this index
}
try
{
String solrUrl = makeSolrUrl(indexName);
boolean clear = line.hasOption(CLEAR_OPTION);
//Set overwrite to true if clear is true
importIndex(indexName, importDir, solrUrl, clear);
}
catch (IOException | SolrServerException | SolrImportExportException e)
{
System.err.println("Problem encountered while trying to import index " + indexName + ".");
e.printStackTrace(System.err);
}
}
}
else if ("export".equals(action))
{
for (String indexName : indexNames)
{
String lastValue = line.getOptionValue(LAST_OPTION);
File exportDir = new File(directoryName);
if (exportDir.exists() && !exportDir.canWrite())
{
System.err.println("Export directory " + directoryName
+ " is not writable by the current user. Not exporting index "
+ indexName);
continue;
}

if (!exportDir.exists())
{
boolean created = exportDir.mkdirs();
if (!created)
{
System.err.println("Export directory " + directoryName
+ " could not be created. Not exporting index " + indexName);
}
continue;
}

try
{
String solrUrl = makeSolrUrl(indexName);
String timeField = makeTimeField(indexName);
exportIndex(indexName, exportDir, solrUrl, timeField, lastValue, line.hasOption(OVERWRITE_OPTION));
}
catch (SolrServerException | IOException | SolrImportExportException e)
{
System.err.println("Problem encountered while trying to export index " + indexName + ".");
e.printStackTrace(System.err);
}
}
}
else if ("reindex".equals(action))
{
for (String indexName : indexNames)
{
try {
boolean keepExport = line.hasOption(KEEP_OPTION);
boolean overwrite = line.hasOption(OVERWRITE_OPTION);
reindex(indexName, directoryName, keepExport, overwrite);
} catch (IOException | SolrServerException | SolrImportExportException e) {
e.printStackTrace();
}
}
}
else
{
System.err.println("Unknown action " + action + "; must be import, export or reindex.");
printHelpAndExit(options, 1);
}
}
catch (ParseException e)
{
System.err.println("Cannot read command options");
printHelpAndExit(options, 1);
}
}

private static Options makeOptions() {
Options options = new Options();
options.addOption(ACTION_OPTION, "action", true, "The action to perform: import, export or reindex. Default: export.");
options.addOption(CLEAR_OPTION, "clear", false, "When importing, also clear the index first. Ignored when action is export or reindex.");
options.addOption(OVERWRITE_OPTION, "force-overwrite", false, "When exporting or re-indexing, allow overwrite of existing export files");
options.addOption(DIRECTORY_OPTION, "directory", true,
"The absolute path for the directory to use for import or export. If omitted, [dspace]/solr-export is used.");
options.addOption(HELP_OPTION, "help", false, "Get help on options for this command.");
options.addOption(INDEX_NAME_OPTION, "index-name", true,
"The names of the indexes to process. At least one is required. Available indexes are: authority, statistics.");
options.addOption(KEEP_OPTION, "keep", false, "When reindexing, keep the contents of the data export directory." +
" By default, the contents of this directory will be deleted once the reindex has finished." +
" Ignored when action is export or import.");
options.addOption(LAST_OPTION, "last", true, "When exporting, export records from the last [timeperiod] only." +
" This can be one of: 'd' (beginning of yesterday through to now);" +
" 'm' (beginning of the previous month through to end of the previous month);" +
" a number, in which case the last [number] of days are exported, through to now (use 0 for today's data)." +
" Date calculation is done in UTC. If omitted, all documents are exported.");
return options;
}
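Editor's note: an illustrative way to drive this tool. The snippet below calls main() programmatically; the equivalent command line would use DSpace's generic dsrun launcher ([dspace]/bin/dspace dsrun org.dspace.util.SolrImportExport -a export -i statistics). The directory path is illustrative, and sites may instead wire the class to a named launcher command:

import org.apache.commons.cli.ParseException;

class SolrImportExportUsageSketch
{
    static void exportStatistics() throws ParseException
    {
        SolrImportExport.main(new String[] {
                "-a", "export",              // action (export is also the default)
                "-i", "statistics",          // index/core name
                "-d", "/dspace/solr-export"  // export directory (illustrative path)
        });
    }
}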
|
||||
/**
 * Reindexes the specified core.
 *
 * @param indexName the name of the core to reindex
 * @param exportDirName the name of the directory to use for export. If this directory doesn't exist, it will be created.
 * @param keepExport whether to keep the contents of the exportDir after the reindex. If keepExport is false and the
 *                   export directory was created by this method, the export directory will be deleted at the end of the reimport.
 * @param overwrite allow export files to be overwritten during re-index
 */
private static void reindex(String indexName, String exportDirName, boolean keepExport, boolean overwrite)
        throws IOException, SolrServerException, SolrImportExportException {
    String tempIndexName = indexName + "-temp";

    String origSolrUrl = makeSolrUrl(indexName);
    String baseSolrUrl = StringUtils.substringBeforeLast(origSolrUrl, "/"); // need to get non-core solr URL
    String tempSolrUrl = baseSolrUrl + "/" + tempIndexName;

    // The configuration details for the statistics shards reside within the "statistics" folder
    String instanceIndexName = indexName.startsWith("statistics-") ? "statistics" : indexName;

    String solrInstanceDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" + File.separator + instanceIndexName;
    // the [dspace]/solr/[indexName]/conf directory needs to be available on the local machine for this to work
    // -- we need access to the schema.xml and solrconfig.xml file, plus files referenced from there
    // if this directory can't be found, output an error message and skip this index
    File solrInstance = new File(solrInstanceDir);
    if (!solrInstance.exists() || !solrInstance.canRead() || !solrInstance.isDirectory())
    {
        throw new SolrImportExportException("Directory " + solrInstanceDir + "/conf/ doesn't exist or isn't readable." +
                " The reindexing process requires the Solr configuration directory for this index to be present on the local machine" +
                " even if Solr is running on a different host. Not reindexing index " + indexName);
    }

    String timeField = makeTimeField(indexName);

    // Ensure the export directory exists and is writable
    File exportDir = new File(exportDirName);
    boolean createdExportDir = exportDir.mkdirs();
    if (!createdExportDir && !exportDir.exists())
    {
        throw new SolrImportExportException("Could not create export directory " + exportDirName);
    }
    if (!exportDir.canWrite())
    {
        throw new SolrImportExportException("Can't write to export directory " + exportDirName);
    }

    try
    {
        HttpSolrServer adminSolr = new HttpSolrServer(baseSolrUrl);

        // try to find out the size of the core and compare it with the free space in the export directory
        CoreAdminResponse status = CoreAdminRequest.getStatus(indexName, adminSolr);
        Object coreSizeObj = status.getCoreStatus(indexName).get("sizeInBytes");
        long coreSize = coreSizeObj != null ? Long.valueOf(coreSizeObj.toString()) : -1;
        long usableExportSpace = exportDir.getUsableSpace();
        if (coreSize >= 0 && usableExportSpace < coreSize)
        {
            System.err.println("Not enough space in export directory " + exportDirName
                    + "; need at least as much space as the index ("
                    + FileUtils.byteCountToDisplaySize(coreSize)
                    + ") but usable space in export directory is only "
                    + FileUtils.byteCountToDisplaySize(usableExportSpace)
                    + ". Not continuing with reindex, please use the " + DIRECTORY_OPTION
                    + " option to specify an alternative export directory with sufficient space.");
            return;
        }

        // Create a temp directory to store temporary core data
        File tempDataDir = new File(ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator + "solr-data");
        boolean createdTempDataDir = tempDataDir.mkdirs();
        if (!createdTempDataDir && !tempDataDir.exists())
        {
            throw new SolrImportExportException("Could not create temporary data directory " + tempDataDir.getCanonicalPath());
        }
        if (!tempDataDir.canWrite())
        {
            throw new SolrImportExportException("Can't write to temporary data directory " + tempDataDir.getCanonicalPath());
        }

        try
        {
            // create a temporary core to hold documents coming in during the reindex
            CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create();
            createRequest.setInstanceDir(solrInstanceDir);
            createRequest.setDataDir(tempDataDir.getCanonicalPath());
            createRequest.setCoreName(tempIndexName);

            createRequest.process(adminSolr).getStatus();
        }
        catch (SolrServerException e)
        {
            // try to continue -- it may just be that the core already existed from a previous, failed attempt
            System.err.println("Caught exception when trying to create temporary core: " + e.getMessage() + "; trying to recover.");
            e.printStackTrace(System.err);
        }

        // swap actual core with temporary one
        CoreAdminRequest swapRequest = new CoreAdminRequest();
        swapRequest.setCoreName(indexName);
        swapRequest.setOtherCoreName(tempIndexName);
        swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
        swapRequest.process(adminSolr);

        try
        {
            // export from the actual core (now under the temp core name, actual data dir)
            exportIndex(indexName, exportDir, tempSolrUrl, timeField, overwrite);

            // clear actual core (temp core name, clearing actual data dir) & import
            importIndex(indexName, exportDir, tempSolrUrl, true);
        }
        catch (Exception e)
        {
            // we ran into some problems with the export/import -- keep going to try and restore the solr cores
            System.err.println("Encountered problem during reindex: " + e.getMessage() + ", will attempt to restore Solr cores");
            e.printStackTrace(System.err);
        }

        // commit changes
        HttpSolrServer origSolr = new HttpSolrServer(origSolrUrl);
        origSolr.commit();

        // swap back (statistics now going to actual core name in actual data dir)
        swapRequest = new CoreAdminRequest();
        swapRequest.setCoreName(tempIndexName);
        swapRequest.setOtherCoreName(indexName);
        swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
        swapRequest.process(adminSolr);

        // export all docs from the now-temp core into the export directory -- this won't cause name collisions with
        // the actual export because the core name for the temporary export has -temp in it while the actual core doesn't
        exportIndex(tempIndexName, exportDir, tempSolrUrl, timeField, overwrite);
        // ...and import them into the now-again-actual core *without* clearing
        importIndex(tempIndexName, exportDir, origSolrUrl, false);

        // commit changes
        origSolr.commit();

        // unload now-temp core (temp core name)
        CoreAdminRequest.unloadCore(tempIndexName, false, false, adminSolr);

        // clean up temporary data dir if this method created it
        if (createdTempDataDir && tempDataDir.exists())
        {
            FileUtils.deleteDirectory(tempDataDir);
        }
    }
    finally
    {
        // clean up export dir if appropriate
        if (!keepExport && createdExportDir && exportDir.exists())
        {
            FileUtils.deleteDirectory(exportDir);
        }
    }
}
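
Taken together, reindex() performs a core-swap dance: create a temporary core backed by a scratch data directory, swap it in so incoming statistics keep landing somewhere, export the real data to CSV and re-import it (clearing as it goes), swap back, then drain the documents that accumulated in the temporary core. A minimal usage sketch -- the core name and directory below are assumptions for illustration, not values taken from this patch:

    // Hypothetical call: rebuild the "statistics" core in place, keeping the
    // CSV export files afterwards and overwriting any stale export files.
    reindex("statistics", "/dspace/solr-export", true, true);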

/**
 * Exports all documents in the given index to the specified target directory in batches of #ROWS_PER_FILE.
 * See #makeExportFilename for the file names that are generated.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @param overwrite If set, allow export files to be overwritten
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
 */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, boolean overwrite)
        throws SolrServerException, SolrImportExportException, IOException {
    exportIndex(indexName, toDir, solrUrl, timeField, null, overwrite);
}

/**
 * Import previously exported documents (or externally created CSV files that have the appropriate structure) into the specified index.
 * The _version_ field is skipped on import to disable Solr's optimistic concurrency functionality.
 *
 * @param indexName the index to import.
 * @param fromDir the source directory. Must exist and be readable.
 *                The importer will look for files whose name starts with <pre>indexName</pre>
 *                and ends with .csv (to match what is generated by #makeExportFilename).
 * @param solrUrl The solr URL for the index to import to. Must not be null.
 * @param clear if true, clear the index before importing.
 * @throws IOException if there is a problem reading the files or communicating with Solr.
 * @throws SolrServerException if there is a problem reading the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem communicating with Solr.
 */
public static void importIndex(final String indexName, File fromDir, String solrUrl, boolean clear)
        throws IOException, SolrServerException, SolrImportExportException
{
    if (StringUtils.isBlank(solrUrl))
    {
        throw new SolrImportExportException("Could not construct solr URL for index " + indexName + ", aborting import.");
    }

    if (!fromDir.exists() || !fromDir.canRead())
    {
        throw new SolrImportExportException("Source directory " + fromDir
                + " doesn't exist or isn't readable, aborting import of index "
                + indexName);
    }

    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    // must get multivalued fields before clearing
    List<String> multivaluedFields = getMultiValuedFields(solr);

    if (clear)
    {
        clearIndex(solrUrl);
    }

    File[] files = fromDir.listFiles(new FilenameFilter()
    {
        @Override
        public boolean accept(File dir, String name)
        {
            return name.startsWith(indexName + EXPORT_SEP) && name.endsWith(".csv");
        }
    });

    if (files == null || files.length == 0)
    {
        log.warn("No export files found in directory " + fromDir.getCanonicalPath() + " for index " + indexName);
        return;
    }

    Arrays.sort(files);

    for (File file : files)
    {
        log.info("Importing file " + file.getCanonicalPath());
        ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
        contentStreamUpdateRequest.setParam("skip", "_version_");
        for (String mvField : multivaluedFields) {
            contentStreamUpdateRequest.setParam("f." + mvField + ".split", "true");
            contentStreamUpdateRequest.setParam("f." + mvField + ".separator", MULTIPLE_VALUES_SPLITTER);
        }
        contentStreamUpdateRequest.setParam("stream.contentType", "text/csv;charset=utf-8");
        contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
        contentStreamUpdateRequest.addFile(file, "text/csv;charset=utf-8");

        solr.request(contentStreamUpdateRequest);
    }

    solr.commit(true, true);
}
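
For a round trip, the import side only needs the directory that exportIndex() wrote to. A sketch under the assumption of a local core at the default URL (both values assumed for illustration):

    // Hypothetical: re-ingest previously exported CSV batches, clearing the
    // core first so the import is authoritative.
    importIndex("statistics", new File("/dspace/solr-export"),
            "http://localhost:8080/solr/statistics", true);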

/**
 * Determine the names of all multi-valued fields from the data in the index.
 *
 * @param solr the solr server to query.
 * @return A list containing all multi-valued fields, or an empty list if none are found / there aren't any.
 */
private static List<String> getMultiValuedFields(HttpSolrServer solr)
{
    List<String> result = new ArrayList<>();
    try
    {
        LukeRequest request = new LukeRequest();
        // this needs to be a non-schema request, otherwise we'll miss dynamic fields
        LukeResponse response = request.process(solr);
        Map<String, LukeResponse.FieldInfo> fields = response.getFieldInfo();
        for (LukeResponse.FieldInfo info : fields.values())
        {
            if (info.getSchema().contains(FieldFlag.MULTI_VALUED.getAbbreviation() + ""))
            {
                result.add(info.getName());
            }
        }
    }
    catch (IOException | SolrServerException e)
    {
        log.fatal("Cannot determine which fields are multi valued: " + e.getMessage(), e);
    }
    return result;
}
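
The Luke handler reports each field's schema flags as a compact string, so the multi-valued test above is just a character check. Illustrative only -- the flag string and its letters below are invented for the example:

    // If a field's schema string were "ITM------------" (I=indexed,
    // T=tokenized, M=multiValued), the test reduces to:
    boolean multiValued = "ITM------------".contains(FieldFlag.MULTI_VALUED.getAbbreviation() + "");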

/**
 * Remove all documents from the Solr index with the given URL, then commit and optimise the index.
 *
 * @param solrUrl URL of the Solr core to clear.
 * @throws IOException if there is a problem in communicating with Solr.
 * @throws SolrServerException if there is a problem in communicating with Solr.
 */
public static void clearIndex(String solrUrl) throws IOException, SolrServerException
{
    HttpSolrServer solr = new HttpSolrServer(solrUrl);
    solr.deleteByQuery("*:*");
    solr.commit();
    solr.optimize();
}
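
Because the deletion is a match-all query, the core is emptied in place without being unloaded. A one-line sketch (the URL is assumed):

    // Hypothetical: wipe the local statistics core before a full re-import.
    clearIndex("http://localhost:8080/solr/statistics");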

/**
 * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
 * See #makeExportFilename for the file names that are generated.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
 * @param overwrite If set, allow export files to be overwritten
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
 */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen, boolean overwrite)
        throws SolrServerException, IOException, SolrImportExportException
{
    log.info(String.format("Export Index [%s] to [%s] using [%s] Time Field[%s] FromWhen[%s]", indexName, toDir, solrUrl, timeField, fromWhen));
    if (StringUtils.isBlank(solrUrl))
    {
        throw new SolrImportExportException("Could not construct solr URL for index " + indexName + ", aborting export.");
    }

    if (!toDir.exists() || !toDir.canWrite())
    {
        throw new SolrImportExportException("Target directory " + toDir
                + " doesn't exist or is not writable, aborting export of index "
                + indexName);
    }

    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    SolrQuery query = new SolrQuery("*:*");
    if (StringUtils.isNotBlank(fromWhen))
    {
        String lastValueFilter = makeFilterQuery(timeField, fromWhen);
        if (StringUtils.isNotBlank(lastValueFilter))
        {
            query.addFilterQuery(lastValueFilter);
        }
    }

    query.setRows(0);
    query.setGetFieldStatistics(timeField);
    Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
    if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
        log.warn(String.format("Queried [%s]. No fieldInfo found while exporting index [%s] time field [%s] from [%s]. Export cancelled.",
                solrUrl, indexName, timeField, fromWhen));
        return;
    }
    FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
    if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
        log.warn(String.format("Queried [%s]. No earliest date found while exporting index [%s] time field [%s] from [%s]. Export cancelled.",
                solrUrl, indexName, timeField, fromWhen));
        return;
    }
    Date earliestTimestamp = (Date) timeFieldInfo.getMin();

    query.setGetFieldStatistics(false);
    query.clearSorts();
    query.setRows(0);
    query.setFacet(true);
    query.add(FacetParams.FACET_RANGE, timeField);
    query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
    query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
    query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
    query.setFacetMinCount(1);

    List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();

    for (RangeFacet.Count monthFacet : monthFacets) {
        Date monthStartDate;
        String monthStart = monthFacet.getValue();
        try
        {
            monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
        }
        catch (java.text.ParseException e)
        {
            throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart, e);
        }
        int docsThisMonth = monthFacet.getCount();

        SolrQuery monthQuery = new SolrQuery("*:*");
        monthQuery.setRows(ROWS_PER_FILE);
        monthQuery.set("wt", "csv");
        monthQuery.set("fl", "*");
        monthQuery.setParam("csv.mv.separator", MULTIPLE_VALUES_SPLITTER);

        monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]");

        for (int i = 0; i < docsThisMonth; i += ROWS_PER_FILE)
        {
            monthQuery.setStart(i);
            URL url = new URL(solrUrl + "/select?" + monthQuery.toString());

            File file = new File(toDir.getCanonicalPath(), makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
            if (file.createNewFile() || overwrite)
            {
                FileUtils.copyURLToFile(url, file);
                String message = String.format("Solr export to file [%s] complete. Export for Index [%s] Month [%s] Batch [%d] Num Docs [%d]",
                        file.getCanonicalPath(), indexName, monthStart, i, docsThisMonth);
                log.info(message);
            }
            else if (file.exists())
            {
                String message = String.format("Solr export file [%s] already exists. Export failed for Index [%s] Month [%s] Batch [%d] Num Docs [%d]",
                        file.getCanonicalPath(), indexName, monthStart, i, docsThisMonth);
                throw new SolrImportExportException(message);
            }
            else
            {
                String message = String.format("Cannot create solr export file [%s]. Export failed for Index [%s] Month [%s] Batch [%d] Num Docs [%d]",
                        file.getCanonicalPath(), indexName, monthStart, i, docsThisMonth);
                throw new SolrImportExportException(message);
            }
        }
    }
}
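
To make the batching concrete, suppose the earliest document carries a time of 12 March 2014, one month holds 25,000 documents, and ROWS_PER_FILE is 10,000 (the counts are assumptions for the example). The range facet buckets documents by month, and each month is then paged through Solr's CSV response writer:

    // Month bucket from the range facet: 2014-03-01T00:00:00Z, count 25000.
    // Filter query used for that bucket:
    //   time:[2014-03-01T00:00:00Z TO 2014-03-01T00:00:00Z+1MONTH]
    // Batches fetched via /select?...&wt=csv&rows=10000&start=...:
    //   start=0, start=10000, start=20000 -> three CSV files for the month.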

/**
 * Return a filter query that represents the export date range passed in as lastValue
 *
 * @param timeField the time field to use for the date range
 * @param lastValue the requested date range, see options for acceptable values
 * @return a filter query representing the date range, or null if no suitable date range can be created.
 */
private static String makeFilterQuery(String timeField, String lastValue) {
    if ("m".equals(lastValue))
    {
        // export data from the previous month
        return timeField + ":[NOW/MONTH-1MONTH TO NOW/MONTH]";
    }

    int days;
    if ("d".equals(lastValue))
    {
        days = 1;
    }
    else
    {
        // other acceptable value: a number, specifying how many days back to export
        days = Integer.valueOf(lastValue); // TODO check value?
    }
    return timeField + ":[NOW/DAY-" + days + "DAYS TO " + SOLR_DATE_FORMAT.format(new Date()) + "]";
}
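
Spelled out, the three accepted values of the -l option map to these filter queries; the closing timestamp in the day-based forms comes from formatting the current time, so the one shown here is illustrative:

    // makeFilterQuery("time", "m") -> "time:[NOW/MONTH-1MONTH TO NOW/MONTH]"
    // makeFilterQuery("time", "d") -> "time:[NOW/DAY-1DAYS TO 2015-07-01T12:00:00Z]"
    // makeFilterQuery("time", "7") -> "time:[NOW/DAY-7DAYS TO 2015-07-01T12:00:00Z]"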

/**
 * Return the specified directory name or fall back to a default value.
 *
 * @param directoryValue a specific directory name. Optional.
 * @return directoryValue if given as a non-blank string. A default directory otherwise.
 */
private static String makeDirectoryName(String directoryValue)
{
    if (StringUtils.isNotBlank(directoryValue))
    {
        return directoryValue;
    }
    return ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr-export" + File.separator;
}

/**
 * Creates a filename for the export batch.
 *
 * @param indexName The name of the index being exported.
 * @param exportStart The start timestamp of the export
 * @param totalRecords The total number of records in the export.
 * @param index The index of the current batch.
 * @return A file name that is appropriate to use for exporting the batch of data described by the parameters.
 */
private static String makeExportFilename(String indexName, Date exportStart, long totalRecords, int index)
{
    String exportFileNumber = "";
    if (totalRecords > ROWS_PER_FILE) {
        exportFileNumber = StringUtils.leftPad("" + (index / ROWS_PER_FILE), (int) Math.ceil(Math.log10(totalRecords / ROWS_PER_FILE)), "0");
    }
    return indexName
            + EXPORT_SEP
            + EXPORT_DATE_FORMAT.format(exportStart)
            + (StringUtils.isNotBlank(exportFileNumber) ? "_" + exportFileNumber : "")
            + ".csv";
}
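
Assuming EXPORT_SEP is "_export_", EXPORT_DATE_FORMAT renders months as yyyy-MM, and ROWS_PER_FILE is 10,000 (all three constants are defined elsewhere in this class, so these values are assumptions here), a 25,000-document month starting in March 2014 would yield:

    // makeExportFilename("statistics", march2014, 25000, 0)     -> "statistics_export_2014-03_0.csv"
    // makeExportFilename("statistics", march2014, 25000, 10000) -> "statistics_export_2014-03_1.csv"
    // makeExportFilename("statistics", march2014, 25000, 20000) -> "statistics_export_2014-03_2.csv"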

/**
 * Returns the full URL for the specified index name.
 *
 * @param indexName the index name whose Solr URL is required. If the index name starts with
 *                  "statistics" or is "authority", the Solr base URL will be looked up
 *                  in the corresponding DSpace configuration file. Otherwise, it will fall back to a default.
 * @return the full URL to the Solr index, as a String.
 */
private static String makeSolrUrl(String indexName)
{
    if (indexName.startsWith("statistics"))
    {
        // TODO account for year shards properly?
        return ConfigurationManager.getProperty("solr-statistics", "server") + indexName.replaceFirst("statistics", "");
    }
    else if ("authority".equals(indexName))
    {
        return ConfigurationManager.getProperty("solr.authority.server");
    }
    return "http://localhost:8080/solr/" + indexName; // TODO better default?
}
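
For example, with solr-statistics.server set to http://localhost:8080/solr/statistics (a common default, assumed here), the name-to-URL mapping works out as below; a year shard such as statistics-2014 keeps its "-2014" suffix after the "statistics" prefix is stripped:

    // makeSolrUrl("statistics")      -> "http://localhost:8080/solr/statistics"
    // makeSolrUrl("statistics-2014") -> "http://localhost:8080/solr/statistics-2014"
    // makeSolrUrl("authority")       -> value of solr.authority.server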

/**
 * Returns a time field for the specified index name that is suitable for incremental export.
 *
 * @param indexName the index name whose time field is required.
 * @return the name of the time field, or null if no suitable field can be determined.
 */
private static String makeTimeField(String indexName)
{
    if (indexName.startsWith("statistics"))
    {
        return "time";
    }
    else if ("authority".equals(indexName))
    {
        return "last_modified_date";
    }
    return null; // TODO some sort of default?
}

/**
 * A utility method to print out all available command-line options and exit given the specified code.
 *
 * @param options the supported options.
 * @param exitCode the exit code to use. The method will call System#exit(int) with the given code.
 */
private static void printHelpAndExit(Options options, int exitCode)
{
    HelpFormatter myhelp = new HelpFormatter();
    myhelp.printHelp(SolrImportExport.class.getSimpleName() + "\n", options);
    System.out.println("\n\nCommand Defaults");
    System.out.println("\tsolr-export-statistics [-a export] [-i statistics]");
    System.out.println("\tsolr-import-statistics [-a import] [-i statistics]");
    System.out.println("\tsolr-reindex-statistics [-a reindex] [-i statistics]");
    System.exit(exitCode);
}
}

@@ -0,0 +1,24 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.util;
+
+/**
+ * @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
+ */
+public class SolrImportExportException extends Exception
+{
+    public SolrImportExportException(String message)
+    {
+        super(message);
+    }
+
+    public SolrImportExportException(String message, Throwable cause)
+    {
+        super(message, cause);
+    }
+}

@@ -13,7 +13,10 @@ import org.dspace.core.Context;
 import org.dspace.storage.bitstore.BitstreamStorageManager;
 
 import java.sql.SQLException;
+import java.util.List;
 import java.util.Set;
+import org.dspace.authorize.AuthorizeManager;
+import org.dspace.authorize.ResourcePolicy;
 
 /**
  *
@@ -46,13 +49,32 @@ public abstract class AbstractVersionProvider {
         for(Bundle nativeBundle : nativeItem.getBundles())
         {
             Bundle bundleNew = itemNew.createBundle(nativeBundle.getName());
+            // DSpace knows several types of resource policies (see the class
+            // org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
+            // and inherited. Submission, Workflow and Inherited policies will be
+            // set automatically as necessary. We need to copy the custom policies
+            // only, to preserve custom-set policies and embargoes (which are
+            // realized by custom policies with a start date).
+            List<ResourcePolicy> bundlePolicies =
+                    AuthorizeManager.findPoliciesByDSOAndType(c, nativeBundle, ResourcePolicy.TYPE_CUSTOM);
+            AuthorizeManager.addPolicies(c, bundlePolicies, bundleNew);
 
             for(Bitstream nativeBitstream : nativeBundle.getBitstreams())
             {
 
                 Bitstream bitstreamNew = createBitstream(c, nativeBitstream);
 
                 bundleNew.addBitstream(bitstreamNew);
 
+                // NOTE: bundle.addBitstream() causes Bundle policies to be inherited by default.
+                // So, we need to REMOVE any inherited TYPE_CUSTOM policies before copying over the correct ones.
+                AuthorizeManager.removeAllPoliciesByDSOAndType(c, bitstreamNew, ResourcePolicy.TYPE_CUSTOM);
+
+                // Now, we need to copy the TYPE_CUSTOM resource policies from old bitstream
+                // to the new bitstream, like we did above for bundles
+                List<ResourcePolicy> bitstreamPolicies =
+                        AuthorizeManager.findPoliciesByDSOAndType(c, nativeBitstream, ResourcePolicy.TYPE_CUSTOM);
+                AuthorizeManager.addPolicies(c, bitstreamPolicies, bitstreamNew);
 
                 if(nativeBundle.getPrimaryBitstreamID() == nativeBitstream.getID())
                 {
                     bundleNew.setPrimaryBitstreamID(bitstreamNew.getID());
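
The same copy-then-fix-up idiom applies to any versioned object. Condensed, with oldDso/newDso as placeholder names for the source and target objects:

    // Only TYPE_CUSTOM policies are copied; submission, workflow and inherited
    // policies are recreated automatically by DSpace.
    List<ResourcePolicy> custom =
            AuthorizeManager.findPoliciesByDSOAndType(c, oldDso, ResourcePolicy.TYPE_CUSTOM);
    // Drop any TYPE_CUSTOM policies the new object inherited on creation...
    AuthorizeManager.removeAllPoliciesByDSOAndType(c, newDso, ResourcePolicy.TYPE_CUSTOM);
    // ...then re-attach the originals so embargoes (start-dated custom
    // policies) survive versioning.
    AuthorizeManager.addPolicies(c, custom, newDso);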

@@ -17,6 +17,9 @@ import org.dspace.utils.DSpace;
 
 import java.io.IOException;
 import java.sql.SQLException;
+import java.util.List;
+import org.dspace.authorize.AuthorizeManager;
+import org.dspace.authorize.ResourcePolicy;
 
 /**
  *
@@ -84,6 +87,15 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
         } catch (IdentifierException e) {
             throw new RuntimeException("Can't create Identifier!");
         }
+        // DSpace knows several types of resource policies (see the class
+        // org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
+        // and inherited. Submission, Workflow and Inherited policies will be
+        // set automatically as necessary. We need to copy the custom policies
+        // only, to preserve custom-set policies and embargoes (which are
+        // realized by custom policies with a start date).
+        List<ResourcePolicy> policies =
+                AuthorizeManager.findPoliciesByDSOAndType(c, previousItem, ResourcePolicy.TYPE_CUSTOM);
+        AuthorizeManager.addPolicies(c, policies, itemNew);
         itemNew.update();
         return itemNew;
     }catch (SQLException e) {

@@ -23,6 +23,9 @@ import org.apache.log4j.Logger;
 
 import org.dspace.authorize.AuthorizeException;
+import org.dspace.authorize.AuthorizeManager;
+import org.dspace.authorize.ResourcePolicy;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
 import org.dspace.content.Collection;
 import org.dspace.content.DCDate;
 import org.dspace.content.Metadatum;
@@ -30,6 +33,7 @@ import org.dspace.content.InstallItem;
 import org.dspace.content.Item;
 import org.dspace.content.WorkspaceItem;
 import org.dspace.core.ConfigurationManager;
+import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.core.Email;
 import org.dspace.core.I18nUtil;
@@ -38,6 +42,7 @@ import org.dspace.curate.WorkflowCurator;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.handle.HandleManager;
+import org.dspace.services.ConfigurationService;
 import org.dspace.storage.rdbms.DatabaseManager;
 import org.dspace.storage.rdbms.TableRow;
 import org.dspace.storage.rdbms.TableRowIterator;
@@ -45,28 +50,31 @@ import org.dspace.usage.UsageWorkflowEvent;
 import org.dspace.utils.DSpace;
 
 /**
- * Workflow state machine
+ * Workflow state machine.
  *
- * Notes:
+ * <p>Notes:
 *
- * Determining item status from the database:
+ * <p>Determining item status from the database:
 *
- * When an item has not been submitted yet, it is in the user's personal
+ * <ul>
+ * <li>When an item has not been submitted yet, it is in the user's personal
 * workspace (there is a row in PersonalWorkspace pointing to it.)
 *
- * When an item is submitted and is somewhere in a workflow, it has a row in the
+ * <li>When an item is submitted and is somewhere in a workflow, it has a row in the
 * WorkflowItem table pointing to it. The state of the workflow can be
- * determined by looking at WorkflowItem.getState()
+ * determined by looking at {@link WorkflowItem#getState()}
 *
- * When a submission is complete, the WorkflowItem pointing to the item is
- * destroyed and the archive() method is called, which hooks the item up to the
- * archive.
+ * <li>When a submission is complete, the {@link WorkflowItem} pointing to the
+ * item is destroyed and the
+ * {@link #archive(org.dspace.core.Context, org.dspace.workflow.WorkflowItem)}
+ * method is called, which hooks the item up to the archive.
+ * </ul>
 *
- * Notification: When an item enters a state that requires notification,
+ * <p>Notification: When an item enters a state that requires notification,
 * (WFSTATE_STEP1POOL, WFSTATE_STEP2POOL, WFSTATE_STEP3POOL,) the workflow needs
 * to notify the appropriate groups that they have a pending task to claim.
 *
- * Revealing lists of approvers, editors, and reviewers. A method could be added
+ * <p>Revealing lists of approvers, editors, and reviewers. A method could be added
 * to do this, but it isn't strictly necessary. (say public List
 * getStateEPeople( WorkflowItem wi, int state ) could return people affected by
 * the item's current state.
@@ -110,7 +118,7 @@ public class WorkflowManager
 };
 
 /* support for 'no notification' */
-private static Map<Integer, Boolean> noEMail = new HashMap<Integer, Boolean>();
+private static final Map<Integer, Boolean> noEMail = new HashMap<Integer, Boolean>();
 
 /** log4j logger */
 private static final Logger log = Logger.getLogger(WorkflowManager.class);
@@ -144,11 +152,13 @@ public class WorkflowManager
  * @param wsi
  *            The WorkspaceItem to convert to a workflow item
  * @return The resulting workflow item
+ * @throws java.sql.SQLException passed through.
+ * @throws org.dspace.authorize.AuthorizeException passed through.
+ * @throws java.io.IOException passed through.
  */
 public static WorkflowItem start(Context c, WorkspaceItem wsi)
         throws SQLException, AuthorizeException, IOException
 {
     // FIXME Check auth
     Item myitem = wsi.getItem();
     Collection collection = wsi.getCollection();
 
@@ -210,7 +220,7 @@ public class WorkflowManager
 {
     ArrayList<WorkflowItem> mylist = new ArrayList<WorkflowItem>();
 
-    String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? ";
+    String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? ORDER BY workflow_id";
 
     TableRowIterator tri = DatabaseManager.queryTable(c,
             "workflowitem", myquery, e.getID());
@@ -246,7 +256,7 @@ public class WorkflowManager
 
     String myquery = "SELECT workflowitem.* FROM workflowitem, TaskListItem" +
             " WHERE tasklistitem.eperson_id= ? " +
-            " AND tasklistitem.workflow_id=workflowitem.workflow_id";
+            " AND tasklistitem.workflow_id=workflowitem.workflow_id ORDER BY workflowitem.workflow_id";
 
     TableRowIterator tri = DatabaseManager
             .queryTable(c, "workflowitem", myquery, e.getID());
@@ -272,10 +282,15 @@ public class WorkflowManager
 /**
  * claim() claims a workflow task for an EPerson
  *
  * @param c
  *            Current user context.
  * @param wi
  *            WorkflowItem to do the claim on
  * @param e
  *            The EPerson doing the claim
+ * @throws java.sql.SQLException passed through.
+ * @throws java.io.IOException passed through.
+ * @throws org.dspace.authorize.AuthorizeException passed through.
  */
 public static void claim(Context c, WorkflowItem wi, EPerson e)
         throws SQLException, IOException, AuthorizeException
@@ -286,27 +301,28 @@ public class WorkflowManager
 {
     case WFSTATE_STEP1POOL:
 
         // authorize DSpaceActions.SUBMIT_REVIEW
+        // FIXME note: authorizeAction ASSUMES that c.getCurrentUser() == e!
+        AuthorizeManager.authorizeAction(c, wi.getCollection(), Constants.WORKFLOW_STEP_1, true);
         doState(c, wi, WFSTATE_STEP1, e);
 
         break;
 
     case WFSTATE_STEP2POOL:
 
         // authorize DSpaceActions.SUBMIT_STEP2
+        AuthorizeManager.authorizeAction(c, wi.getCollection(), Constants.WORKFLOW_STEP_2, true);
         doState(c, wi, WFSTATE_STEP2, e);
 
         break;
 
     case WFSTATE_STEP3POOL:
 
         // authorize DSpaceActions.SUBMIT_STEP3
+        AuthorizeManager.authorizeAction(c, wi.getCollection(), Constants.WORKFLOW_STEP_3, true);
         doState(c, wi, WFSTATE_STEP3, e);
 
         break;
 
     // if we got here, we weren't pooled... error?
     // FIXME - log the error?
+    default:
+        throw new IllegalArgumentException("Workflow Step " + taskstate + " is out of range.");
 }
 
 log.info(LogManager.getHeader(c, "claim_task", "workflow_item_id="
@@ -329,6 +345,9 @@ public class WorkflowManager
  *            WorkflowItem to do the approval on
  * @param e
  *            EPerson doing the approval
+ * @throws java.sql.SQLException passed through.
+ * @throws java.io.IOException passed through.
+ * @throws org.dspace.authorize.AuthorizeException passed through.
  */
 public static void advance(Context c, WorkflowItem wi, EPerson e)
         throws SQLException, IOException, AuthorizeException
@@ -337,11 +356,12 @@ public class WorkflowManager
 }
 
 /**
- * advance() sends an item forward in the workflow (reviewers,
- * approvers, and editors all do an 'approve' to move the item forward) if
- * the item arrives at the submit state, then remove the WorkflowItem and
- * call the archive() method to put it in the archive, and email notify the
- * submitter of a successful submission
+ * advance() sends an item forward in the workflow. Reviewers,
+ * approvers, and editors all do an 'approve' to move the item forward.
+ * If the item arrives at the submit state, then remove the WorkflowItem,
+ * call the {@link #archive(org.dspace.core.Context, org.dspace.workflow.WorkflowItem)}
+ * method to put it in the archive, and email notify the
+ * submitter of a successful submission.
 *
 * @param c
 *            Context
@@ -355,6 +375,10 @@ public class WorkflowManager
 *
 * @param record
 *            boolean indicating whether to record action
+* @return true if the state was advanced.
+* @throws java.sql.SQLException passed through.
+* @throws java.io.IOException passed through.
+* @throws org.dspace.authorize.AuthorizeException passed through.
 */
 public static boolean advance(Context c, WorkflowItem wi, EPerson e,
         boolean curate, boolean record)
@@ -385,8 +409,15 @@ public class WorkflowManager
         break;
 
     case WFSTATE_STEP1:
+        // advance(...) will call itself if no workflow step group exists
+        // so we need to check permissions only if a workflow step group is
+        // in place.
+        if (wi.getCollection().getWorkflowGroup(1) != null)
+        {
+            // FIXME note: authorizeAction ASSUMES that c.getCurrentUser() == e!
+            AuthorizeManager.authorizeAction(c, wi.getCollection(), Constants.WORKFLOW_STEP_1, true);
+        }
 
         // authorize DSpaceActions.SUBMIT_REVIEW
         // Record provenance
         if (record)
         {
@@ -397,8 +428,14 @@ public class WorkflowManager
         break;
 
     case WFSTATE_STEP2:
+        // advance(...) will call itself if no workflow step group exists
+        // so we need to check permissions only if a workflow step group is
+        // in place.
+        if (wi.getCollection().getWorkflowGroup(2) != null)
+        {
+            AuthorizeManager.authorizeAction(c, wi.getCollection(), Constants.WORKFLOW_STEP_2, true);
+        }
 
         // authorize DSpaceActions.SUBMIT_STEP2
         // Record provenance
         if (record)
         {
@@ -409,8 +446,14 @@ public class WorkflowManager
         break;
 
     case WFSTATE_STEP3:
+        // advance(...) will call itself if no workflow step group exists
+        // so we need to check permissions only if a workflow step group is
+        // in place.
+        if (wi.getCollection().getWorkflowGroup(3) != null)
+        {
+            AuthorizeManager.authorizeAction(c, wi.getCollection(), Constants.WORKFLOW_STEP_3, true);
+        }
 
         // authorize DSpaceActions.SUBMIT_STEP3
         // We don't record approval for editors, since they can't reject,
         // and thus didn't actually make a decision
         archived = doState(c, wi, WFSTATE_ARCHIVE, e);
@@ -429,7 +472,7 @@ public class WorkflowManager
 }
 
 /**
- * unclaim() returns an owned task/item to the pool
+ * returns an owned task/item to the pool
 *
 * @param c
 *            Context
@@ -437,6 +480,9 @@ public class WorkflowManager
 *            WorkflowItem to operate on
 * @param e
 *            EPerson doing the operation
+* @throws java.sql.SQLException passed through.
+* @throws java.io.IOException passed through.
+* @throws org.dspace.authorize.AuthorizeException passed through.
 */
 public static void unclaim(Context c, WorkflowItem wi, EPerson e)
         throws SQLException, IOException, AuthorizeException
@@ -447,27 +493,31 @@ public class WorkflowManager
 {
     case WFSTATE_STEP1:
 
         // authorize DSpaceActions.STEP1
         doState(c, wi, WFSTATE_STEP1POOL, e);
 
         break;
 
     case WFSTATE_STEP2:
 
         // authorize DSpaceActions.APPROVE
         doState(c, wi, WFSTATE_STEP2POOL, e);
 
         break;
 
     case WFSTATE_STEP3:
 
         // authorize DSpaceActions.STEP3
         doState(c, wi, WFSTATE_STEP3POOL, e);
 
         break;
 
     // error handling? shouldn't get here
     // FIXME - what to do with error - log it?
+    default:
+        throw new IllegalStateException("WorkflowItem reached an unknown state.");
 }
 
+try {
+    c.turnOffAuthorisationSystem();
     wi.update();
+} finally {
+    c.restoreAuthSystemState();
+}
 
 log.info(LogManager.getHeader(c, "unclaim_workflow",
@@ -511,166 +561,299 @@ public class WorkflowManager
         returnToWorkspace(c, wi);
     }
 
-    // returns true if archived
+    /**
+     * Move a workflow item to a new state. The item may be put in a pool,
+     * removed from a pool and assigned to a user, or archived.
+     *
+     * @param c current DSpace context.
+     * @param wi workflow item whose state should transition.
+     * @param newstate move {@link wi} to this state.
+     * @param newowner assign {@link wi} to this user.
+     * @return true if archived.
+     * @throws SQLException passed through.
+     * @throws IOException passed through.
+     * @throws AuthorizeException passed through.
+     */
     private static boolean doState(Context c, WorkflowItem wi, int newstate,
             EPerson newowner) throws SQLException, IOException,
             AuthorizeException
     {
         Collection mycollection = wi.getCollection();
-        Group mygroup = null;
-        boolean archived = false;
 
         //Gather our old data for launching the workflow event
         int oldState = wi.getState();
 
-        wi.setState(newstate);
 
+        boolean archived;
         switch (newstate)
         {
         case WFSTATE_STEP1POOL:
 
-            // any reviewers?
-            // if so, add them to the tasklist
-            wi.setOwner(null);
-
-            // get reviewers (group 1 )
-            mygroup = mycollection.getWorkflowGroup(1);
-
-            if ((mygroup != null) && !(mygroup.isEmpty()))
-            {
-                // get a list of all epeople in group (or any subgroups)
-                EPerson[] epa = Group.allMembers(c, mygroup);
-
-                // there were reviewers, change the state
-                // and add them to the list
-                createTasks(c, wi, epa);
-                wi.update();
-
-                // email notification
-                notifyGroupOfTask(c, wi, mygroup, epa);
-            }
-            else
-            {
-                // no reviewers, skip ahead
-                wi.setState(WFSTATE_STEP1);
-                archived = advance(c, wi, null, true, false);
-            }
-
+            archived = pool(c, wi, 1);
             break;
 
         case WFSTATE_STEP1:
 
-            // remove reviewers from tasklist
-            // assign owner
-            deleteTasks(c, wi);
-            wi.setOwner(newowner);
-
+            assignToReviewer(c, wi, 1, newowner);
+            archived = false;
             break;
 
         case WFSTATE_STEP2POOL:
 
-            // clear owner
-            // any approvers?
-            // if so, add them to tasklist
-            // if not, skip to next state
-            wi.setOwner(null);
-
-            // get approvers (group 2)
-            mygroup = mycollection.getWorkflowGroup(2);
-
-            if ((mygroup != null) && !(mygroup.isEmpty()))
-            {
-                //get a list of all epeople in group (or any subgroups)
-                EPerson[] epa = Group.allMembers(c, mygroup);
-
-                // there were approvers, change the state
-                //  timestamp, and add them to the list
-                createTasks(c, wi, epa);
-
-                // email notification
-                notifyGroupOfTask(c, wi, mygroup, epa);
-            }
-            else
-            {
-                // no reviewers, skip ahead
-                wi.setState(WFSTATE_STEP2);
-                archived = advance(c, wi, null, true, false);
-            }
-
+            archived = pool(c, wi, 2);
            break;
 
        case WFSTATE_STEP2:
 
-            // remove admins from tasklist
-            // assign owner
-            deleteTasks(c, wi);
-            wi.setOwner(newowner);
-
+            assignToReviewer(c, wi, 2, newowner);
+            archived = false;
            break;
 
        case WFSTATE_STEP3POOL:
 
-            // any editors?
-            // if so, add them to tasklist
-            wi.setOwner(null);
-            mygroup = mycollection.getWorkflowGroup(3);
-
-            if ((mygroup != null) && !(mygroup.isEmpty()))
-            {
-                // get a list of all epeople in group (or any subgroups)
-                EPerson[] epa = Group.allMembers(c, mygroup);
-
-                // there were editors, change the state
-                //  timestamp, and add them to the list
-                createTasks(c, wi, epa);
-
-                // email notification
-                notifyGroupOfTask(c, wi, mygroup, epa);
-            }
-            else
-            {
-                // no editors, skip ahead
-                wi.setState(WFSTATE_STEP3);
-                archived = advance(c, wi, null, true, false);
-            }
-
+            archived = pool(c, wi, 3);
            break;
 
        case WFSTATE_STEP3:
 
-            // remove editors from tasklist
-            // assign owner
-            deleteTasks(c, wi);
-            wi.setOwner(newowner);
-
+            assignToReviewer(c, wi, 3, newowner);
+            archived = false;
            break;
 
        case WFSTATE_ARCHIVE:
 
            // put in archive in one transaction
            // remove workflow tasks
            deleteTasks(c, wi);
 
            mycollection = wi.getCollection();
 
-            Item myitem = archive(c, wi);
+            Item myItem = archive(c, wi);
 
            // now email notification
-            notifyOfArchive(c, myitem, mycollection);
-            archived = true;
+            notifyOfArchive(c, myItem, mycollection);
 
-            break;
+            // remove any workflow policies left
+            try {
+                c.turnOffAuthorisationSystem();
+                revokeReviewerPolicies(c, myItem);
+            } finally {
+                c.restoreAuthSystemState();
+            }
 
-    logWorkflowEvent(c, wi.getItem(), wi, c.getCurrentUser(), newstate, newowner, mycollection, oldState, mygroup);
+            logWorkflowEvent(c, wi.getItem(), wi, c.getCurrentUser(), newstate,
+                    newowner, mycollection, oldState, null);
+            return true;
        default:
            throw new IllegalArgumentException("WorkflowManager cannot handle workflowItemState " + newstate);
        }
 
        if (!archived)
        {
+            try {
+                c.turnOffAuthorisationSystem();
                wi.update();
+            } finally {
+                c.restoreAuthSystemState();
+            }
        }
 
        return archived;
    }

+    /**
+     * Assign this workflow item to a reviewer.
+     *
+     * @param context current DSpace context.
+     * @param workflowItem the item to be assigned.
+     * @param step review step.
+     * @param newowner the reviewer to be assigned.
+     * @throws AuthorizeException passed through.
+     * @throws SQLException passed through.
+     * @throws IllegalArgumentException if {@code step} is unknown.
+     */
+    protected static void assignToReviewer(Context context, WorkflowItem workflowItem,
+            int step, EPerson newowner)
+            throws AuthorizeException, SQLException
+    {
+        // shortcut to the collection
+        Collection collection = workflowItem.getCollection();
+        // from the step we can recognize the new state and the corresponding policy action.
+        int newState;
+        int correspondingAction;
+        switch (step)
+        {
+        case 1:
+            newState = WFSTATE_STEP1;
+            correspondingAction = Constants.WORKFLOW_STEP_1;
+            break;
+        case 2:
+            newState = WFSTATE_STEP2;
+            correspondingAction = Constants.WORKFLOW_STEP_2;
+            break;
+        case 3:
+            newState = WFSTATE_STEP3;
+            correspondingAction = Constants.WORKFLOW_STEP_3;
+            break;
+        default:
+            throw new IllegalArgumentException("Unknown workflow step " + step);
+        }
+
+        // Gather the old state for logging.
+        int oldState = workflowItem.getState();
+
+        // If there is a workflow state group and it contains any members,
+        // then we have to check the permissions first.
+        Group stateGroup = collection.getWorkflowGroup(step);
+        if ((stateGroup != null) && !(stateGroup.isEmpty()))
+        {
+            // FIXME note: authorizeAction ASSUMES that c.getCurrentUser() == newowner!
+            AuthorizeManager.authorizeAction(context, collection, correspondingAction, true);
+        }
+
+        // Give the owner the appropriate permissions.
+        try {
+            context.turnOffAuthorisationSystem();
+            // maybe unnecessary, but revoke any previously granted permissions.
+            revokeReviewerPolicies(context, workflowItem.getItem());
+            // Finally grant the new permissions.
+            grantReviewerPolicies(context, workflowItem, newowner);
+        } finally {
+            context.restoreAuthSystemState();
+        }
+
+        // Remove task from tasklist as someone is working on it now.
+        deleteTasks(context, workflowItem);
+        // Assign new owner.
+        workflowItem.setState(newState);
+        workflowItem.setOwner(newowner);
+
+        logWorkflowEvent(context, workflowItem.getItem(), workflowItem,
+                context.getCurrentUser(), newState, newowner, collection, oldState, null);
+    }

+    /**
+     * Helper method that manages state, policies, owner, notifies, task list items
+     * and so on whenever a WorkflowItem should be added to a workflow step pool.
+     * Don't use this method directly. Either use
+     * {@link #unclaim(Context, WorkflowItem, EPerson)} if the item is claimed,
+     * {@link #start(Context, WorkspaceItem)} to start the workflow, or
+     * {@link #advance(Context, WorkflowItem, EPerson)} to move an item to the next state.
+     *
+     * @param context DSpace context object.
+     * @param workflowItem the item to be pooled.
+     * @param step the step (1-3) of the pool the item should be put into.
+     * @return true if the item was archived because no reviewers were assigned
+     *         to any of the following workflow steps, false otherwise.
+     * @throws SQLException passed through.
+     * @throws AuthorizeException passed through.
+     * @throws IOException passed through.
+     * @throws IllegalArgumentException if {@code step} has another value than
+     *         either 1, 2, or 3.
+     */
+    protected static boolean pool(Context context, WorkflowItem workflowItem, int step)
+            throws SQLException, AuthorizeException, IOException
+    {
+        // shortcut to the collection
+        Collection collection = workflowItem.getCollection();
+
+        // From the step we can recognize the new state and the corresponding state.
+        // The new state is the pool of the step.
+        // The corresponding state is the state an item gets when it is claimed.
+        // That is important to recognize if we have to send notifications
+        // and if we have to skip a pool.
+        int newState;
+        int correspondingState;
+        switch (step)
+        {
+        case 1:
+            newState = WFSTATE_STEP1POOL;
+            correspondingState = WFSTATE_STEP1;
+            break;
+        case 2:
+            newState = WFSTATE_STEP2POOL;
+            correspondingState = WFSTATE_STEP2;
+            break;
+        case 3:
+            newState = WFSTATE_STEP3POOL;
+            correspondingState = WFSTATE_STEP3;
+            break;
+        default:
+            throw new IllegalArgumentException("Unknown workflow step " + step);
+        }
+
+        // Gather our old owner and state, as we need those as well to determine
+        // whether we have to send notifications.
+        int oldState = workflowItem.getState();
+        EPerson oldOwner = workflowItem.getOwner();
+        // Clear owner.
+        workflowItem.setOwner(null);
+        // Don't revoke the reviewer policies yet. They may be needed to advance the item.
+
+        // Any approvers? If so, add them to the tasklist; if not, skip to next state.
+        Group workflowStepGroup = collection.getWorkflowGroup(step);
+        if ((workflowStepGroup != null) && !(workflowStepGroup.isEmpty()))
+        {
+            // Set new item state.
+            workflowItem.setState(newState);
+
+            // Revoke previously granted reviewer policies and grant read permissions.
+            try {
+                context.turnOffAuthorisationSystem();
+                // Revoke previously granted policies.
+                revokeReviewerPolicies(context, workflowItem.getItem());
+
+                // JSPUI offers a preview to every task before a reviewer claims it.
+                // So we need to grant permissions in advance, so that all
+                // possible reviewers can read the item and all bitstreams in
+                // the bundle "ORIGINAL".
+                AuthorizeManager.addPolicy(context, workflowItem.getItem(),
+                        Constants.READ, workflowStepGroup,
+                        ResourcePolicy.TYPE_WORKFLOW);
+                Bundle originalBundle;
+                try {
+                    originalBundle = workflowItem.getItem().getBundles("ORIGINAL")[0];
+                } catch (IndexOutOfBoundsException ex) {
+                    originalBundle = null;
+                }
+                if (originalBundle != null)
+                {
+                    AuthorizeManager.addPolicy(context, originalBundle, Constants.READ,
+                            workflowStepGroup, ResourcePolicy.TYPE_WORKFLOW);
+                    for (Bitstream bitstream : originalBundle.getBitstreams())
+                    {
+                        AuthorizeManager.addPolicy(context, bitstream, Constants.READ,
+                                workflowStepGroup, ResourcePolicy.TYPE_WORKFLOW);
+                    }
+                }
+            } finally {
+                context.restoreAuthSystemState();
+            }
+
+            // Get a list of all epeople in group (or any subgroups)
+            EPerson[] epa = Group.allMembers(context, workflowStepGroup);
+
+            // There were reviewers. Change the state and then add them to the list.
+            createTasks(context, workflowItem, epa);
+            ConfigurationService configurationService = new DSpace().getConfigurationService();
+            if (configurationService.getPropertyAsType("workflow.notify.returned.tasks", true)
+                    || oldState != correspondingState
+                    || oldOwner == null)
+            {
+                // Email notification
+                notifyGroupOfTask(context, workflowItem, workflowStepGroup, epa);
+            }
+            logWorkflowEvent(context, workflowItem.getItem(), workflowItem,
+                    context.getCurrentUser(), newState, null, collection,
+                    oldState, workflowStepGroup);
+            return false;
+        }
+        else
+        {
+            // No reviewers -- skip ahead.
+            workflowItem.setState(correspondingState);
+            boolean archived = advance(context, workflowItem, null, true, false);
+            if (archived)
+            {
+                // Remove any workflow policies that may be left over.
+                try {
+                    context.turnOffAuthorisationSystem();
+                    revokeReviewerPolicies(context, workflowItem.getItem());
+                } finally {
+                    context.restoreAuthSystemState();
+                }
+            }
+            return archived;
+        }
+    }
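
Claiming reverses the pooling direction: pool() parks an item in a step's pool and grants the step group read access, while assignToReviewer() pulls it out again for one EPerson. An illustrative summary of the mapping both helpers switch on (a reading aid, not code from the patch):

    // step  pool state          claimed state    action checked on the collection
    // 1     WFSTATE_STEP1POOL   WFSTATE_STEP1    Constants.WORKFLOW_STEP_1
    // 2     WFSTATE_STEP2POOL   WFSTATE_STEP2    Constants.WORKFLOW_STEP_2
    // 3     WFSTATE_STEP3POOL   WFSTATE_STEP3    Constants.WORKFLOW_STEP_3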
 
     private static void logWorkflowEvent(Context c, Item item, WorkflowItem workflowItem, EPerson actor, int newstate, EPerson newOwner, Collection mycollection, int oldState, Group newOwnerGroup) {
         if(newstate == WFSTATE_ARCHIVE || newstate == WFSTATE_STEP1POOL || newstate == WFSTATE_STEP2POOL || newstate == WFSTATE_STEP3POOL){
@@ -794,6 +977,8 @@ public class WorkflowManager
         Item myitem = wfi.getItem();
         Collection mycollection = wfi.getCollection();
 
+        // Regarding auth: this method is private.
+        // Authorization should be checked in all public methods calling this one.
         // FIXME: How should this interact with the workflow system?
         // FIXME: Remove license
         // FIXME: Provenance statement?
@@ -896,7 +1081,7 @@ public class WorkflowManager
         }
     }
 
-    // deletes all tasks associated with a workflowitem
+    /** Deletes all tasks associated with a workflowitem. */
     static void deleteTasks(Context c, WorkflowItem wi) throws SQLException
     {
         String myrequest = "DELETE FROM TaskListItem WHERE workflow_id= ? ";
@@ -1100,6 +1285,8 @@ public class WorkflowManager
      * get the name of the eperson who started this workflow
      *
      * @param wi the workflow item
      * @return "user name (email@address)"
+     * @throws java.sql.SQLException passed through.
      */
     public static String getSubmitterName(WorkflowItem wi) throws SQLException
     {
@@ -1110,6 +1297,10 @@ public class WorkflowManager
 
     private static String getEPersonName(EPerson e) throws SQLException
     {
+        if (e == null)
+        {
+            return "Unknown";
+        }
         String submitter = e.getFullName();
 
         submitter = submitter + " (" + e.getEmail() + ")";
@@ -1171,4 +1362,116 @@ public class WorkflowManager
|
||||
myitem.addDC("description", "provenance", "en", provmessage);
|
||||
myitem.update();
|
||||
}
|
||||
|
||||
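    // Illustration (not part of the patch): given an EPerson with full name
    // "Jane Doe" and email "jane@example.org", getSubmitterName(wi) and
    // getEPersonName(e) would both yield the string "Jane Doe (jane@example.org)";
    // a null EPerson yields "Unknown".
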
    /**
     * This method grants the appropriate permissions to reviewers so that they
     * can read and edit metadata and read files and edit files if allowed by
     * configuration.
     * <p>
     * In most cases this method must be called within a try-finally-block that
     * temporarily disables the authorisation system. That is not done by this
     * method as it should be done carefully and only in contexts in which
     * granting the permissions is authorized by some previous checks.
     *
     * @param context
     * @param wfi While all policies are granted on item, bundle or bitstream
     *            level, this method takes a {@link WorkflowItem} for convenience and
     *            uses wfi.getItem() to get the actual item.
     * @param reviewer EPerson to grant the rights to.
     * @throws SQLException
     * @throws AuthorizeException
     */
    protected static void grantReviewerPolicies(Context context, WorkflowItem wfi, EPerson reviewer)
            throws SQLException, AuthorizeException
    {
        // get item and bundle "ORIGINAL"
        Item item = wfi.getItem();
        Bundle originalBundle;
        try {
            originalBundle = item.getBundles("ORIGINAL")[0];
        } catch (IndexOutOfBoundsException ex) {
            originalBundle = null;
        }

        // grant item level policies
        for (int action : new int[] {Constants.READ, Constants.WRITE, Constants.ADD, Constants.REMOVE, Constants.DELETE})
        {
            AuthorizeManager.addPolicy(context, item, action, reviewer, ResourcePolicy.TYPE_WORKFLOW);
        }

        // set bitstream and bundle policies
        if (originalBundle != null)
        {
            AuthorizeManager.addPolicy(context, originalBundle, Constants.READ, reviewer, ResourcePolicy.TYPE_WORKFLOW);

            // shall reviewers be able to edit files?
            ConfigurationService configurationService = new DSpace().getConfigurationService();
            boolean editFiles = Boolean.parseBoolean(configurationService.getProperty("workflow.reviewer.file-edit"));
            // if a reviewer should be able to edit bitstreams, we need to add
            // permissions regarding the bundle "ORIGINAL" and its bitstreams
            if (editFiles)
            {
                AuthorizeManager.addPolicy(context, originalBundle, Constants.ADD, reviewer, ResourcePolicy.TYPE_WORKFLOW);
                AuthorizeManager.addPolicy(context, originalBundle, Constants.REMOVE, reviewer, ResourcePolicy.TYPE_WORKFLOW);
                // Whenever a new bitstream is added, it inherits the policies of the bundle.
                // So we need to add all policies newly created bitstreams should get.
                AuthorizeManager.addPolicy(context, originalBundle, Constants.WRITE, reviewer, ResourcePolicy.TYPE_WORKFLOW);
                AuthorizeManager.addPolicy(context, originalBundle, Constants.DELETE, reviewer, ResourcePolicy.TYPE_WORKFLOW);
            }
            for (Bitstream bitstream : originalBundle.getBitstreams())
            {
                AuthorizeManager.addPolicy(context, bitstream, Constants.READ, reviewer, ResourcePolicy.TYPE_WORKFLOW);

                // add further rights if the reviewer should be able to edit bitstreams
                if (editFiles)
                {
                    AuthorizeManager.addPolicy(context, bitstream, Constants.WRITE, reviewer, ResourcePolicy.TYPE_WORKFLOW);
                    AuthorizeManager.addPolicy(context, bitstream, Constants.DELETE, reviewer, ResourcePolicy.TYPE_WORKFLOW);
                }
            }
        }
    }

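    // A minimal usage sketch (illustration, not part of the patch): callers are
    // expected to wrap grantReviewerPolicies() in the try-finally described in
    // the javadoc above. "context", "workflowItem" and "reviewer" are assumed
    // to be in scope and already validated by the caller's own checks.
    //
    //     try {
    //         context.turnOffAuthorisationSystem();
    //         grantReviewerPolicies(context, workflowItem, reviewer);
    //     } finally {
    //         context.restoreAuthSystemState();
    //     }
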
    /**
     * This method revokes any permission granted by the basic workflow system
     * on the item specified as argument. At the time of writing, these
     * permissions will all have been granted by
     * {@link #grantReviewerPolicies(org.dspace.core.Context, org.dspace.workflowbasic.BasicWorkflowItem, org.dspace.eperson.EPerson)}.
     * <p>
     * In most cases this method must be called within a try-finally-block that
     * temporarily disables the authorisation system. That is not done by this
     * method as it should be done carefully and only in contexts in which
     * revoking the permissions is authorized by some previous checks.
     *
     * @param context
     * @param item
     * @throws SQLException passed through.
     * @throws AuthorizeException passed through.
     */
    protected static void revokeReviewerPolicies(Context context, Item item)
            throws SQLException, AuthorizeException
    {
        // get bundle "ORIGINAL"
        Bundle originalBundle;
        try {
            originalBundle = item.getBundles("ORIGINAL")[0];
        } catch (IndexOutOfBoundsException ex) {
            originalBundle = null;
        }

        // remove bitstream and bundle level policies
        if (originalBundle != null)
        {
            // We added policies for Bitstreams of the bundle "ORIGINAL" only
            for (Bitstream bitstream : originalBundle.getBitstreams())
            {
                AuthorizeManager.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW);
            }

            AuthorizeManager.removeAllPoliciesByDSOAndType(context, originalBundle, ResourcePolicy.TYPE_WORKFLOW);
        }

        // remove item level policies
        AuthorizeManager.removeAllPoliciesByDSOAndType(context, item, ResourcePolicy.TYPE_WORKFLOW);
    }
}

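// A hypothetical post-condition check (illustration only, assuming the
// DSpace 5.x AuthorizeManager/ResourcePolicy API): after revokeReviewerPolicies()
// runs, no TYPE_WORKFLOW policy should remain on the item, while policies of
// other types (e.g. TYPE_SUBMISSION, TYPE_CUSTOM) are left untouched.
//
//     for (ResourcePolicy rp : AuthorizeManager.getPolicies(context, item)) {
//         assert !ResourcePolicy.TYPE_WORKFLOW.equals(rp.getRpType());
//     }
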
@@ -92,6 +92,15 @@ public class XmlWorkflowManager {
     * startWithoutNotify() starts the workflow normally, but disables
     * notifications (useful for large imports) for the first workflow step -
     * subsequent notifications happen normally
     * @param c
     * @param wsi
     * @return a new workflow item wrapping the item removed from the workspace.
     * @throws java.sql.SQLException passed through.
     * @throws org.dspace.authorize.AuthorizeException passed through.
     * @throws java.io.IOException passed through.
     * @throws org.dspace.xmlworkflow.WorkflowException passed through.
     * @throws org.dspace.xmlworkflow.WorkflowConfigurationException passed through.
     * @throws javax.mail.MessagingException passed through.
     */
    public static XmlWorkflowItem startWithoutNotify(Context c, WorkspaceItem wsi)
            throws SQLException, AuthorizeException, IOException, WorkflowException, WorkflowConfigurationException, MessagingException {
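    // Hypothetical batch-import loop (illustration only; "workspaceItems" is an
    // assumed collection of WorkspaceItem): startWithoutNotify() keeps a large
    // import from mailing reviewers once per item for the first step.
    //
    //     for (WorkspaceItem item : workspaceItems) {
    //         XmlWorkflowItem wfi = XmlWorkflowManager.startWithoutNotify(context, item);
    //     }
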
@@ -352,9 +361,14 @@ public class XmlWorkflowManager {
     * with the relevant collection, added to the search index, and any other
     * tasks such as assigning dates are performed.
     *
     * @param c
     * @param wfi
     * @return the fully archived item.
     * @throws java.sql.SQLException passed through.
     * @throws java.io.IOException passed through.
     * @throws org.dspace.authorize.AuthorizeException passed through.
     */
    public static Item archive(Context c, XmlWorkflowItem wfi)
    protected static Item archive(Context c, XmlWorkflowItem wfi)
            throws SQLException, IOException, AuthorizeException {
        // FIXME: Check auth
        Item item = wfi.getItem();

@@ -289,6 +289,8 @@ jsp.dspace-admin.general.eperson = EPerson
jsp.dspace-admin.general.group = Group
jsp.dspace-admin.general.group-colon = Group:
jsp.dspace-admin.general.next.button = Next >
jsp.dspace-admin.general.policy-end-date-colon = End Date:
jsp.dspace-admin.general.policy-start-date-colon = Start Date:
jsp.dspace-admin.general.remove = Remove
jsp.dspace-admin.general.save = Save
jsp.dspace-admin.general.update = Update
@@ -1000,15 +1002,15 @@ jsp.submit.complete.info = Your submissio
jsp.submit.complete.again = Submit another item to the same collection
jsp.submit.complete.link = Go to My DSpace
jsp.submit.complete.title = Submission Complete!
jsp.submit.creative-commons.choice1 = Press the 'Next' button below to <em>keep</em> the license previously chosen.
jsp.submit.creative-commons.choice2 = Press the 'Skip Creative Commons' button below to <em>remove</em> the current choice, and forego a Creative Commons license.
jsp.submit.creative-commons.choice3 = Complete the selection process below to <em>replace</em> the current choice.
jsp.submit.creative-commons.heading = Submit: Use a Creative Commons License
jsp.submit.creative-commons.info1 = You have already chosen a Creative Commons license and added it to this item. You may:
jsp.submit.creative-commons.info2 = To license your Item under Creative Commons, follow the instructions below. You will be given an opportunity to review your selection. Follow the 'proceed' link to add the license. If you wish to omit a Creative Commons license, press the 'Skip Creative Commons' button.
jsp.submit.creative-commons.info3 = Your browser must support IFrames to use this feature
jsp.submit.creative-commons.skip.button = Skip Creative Commons >
jsp.submit.creative-commons.info1 = If you wish, you may add a <a href="http://creativecommons.org/">Creative Commons</a> License to your item. <strong>Creative Commons licenses govern what people who read your work may then do with it.</strong>
jsp.submit.creative-commons.title = Use a Creative Commons License

jsp.submit.creative-commons.license = License Type
jsp.submit.creative-commons.select_change = Select or modify your license ...
jsp.submit.creative-commons.no_license = No Creative Commons License
jsp.submit.creative-commons.license.current = Current license

jsp.submit.edit-bitstream-access.title = Edit Bitstream Access
jsp.submit.edit-bitstream-access.heading = Edit Bitstream Access
jsp.submit.edit-bitstream-access.save.button = Save

@@ -9,39 +9,41 @@

-->
<xsl:stylesheet version="1.1"
                xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
                xmlns:cc="http://creativecommons.org/ns#"
                xmlns:old-cc="http://web.resource.org/cc/"
                xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
                xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:cc="http://creativecommons.org/ns#"
                xmlns:old-cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
                exclude-result-prefixes="old-cc">

    <xsl:output method="xml" indent="yes"/>
    <xsl:output method="xml" indent="yes" />

    <xsl:template match="/">
        <xsl:apply-templates select="result/rdf/rdf:RDF" />
    </xsl:template>

    <!-- process incoming RDF: copy everything, add our own statements for cc:Work -->
    <xsl:template match="/rdf:RDF">
        <rdf:RDF>
            <xsl:copy-of select="@*"/>
            <xsl:apply-templates select="cc:License"/>
    <xsl:template match="result/rdf/rdf:RDF">
        <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
                 xmlns:cc="http://creativecommons.org/ns#">
            <xsl:copy-of select="@*" />
            <xsl:apply-templates select="cc:License" />
        </rdf:RDF>
    </xsl:template>

    <!-- handle License element -->
    <xsl:template match="cc:License">
        <cc:Work rdf:about="">
            <cc:license rdf:resource="{@rdf:about}"/>
            <cc:license rdf:resource="{@rdf:about}" />
        </cc:Work>
        <cc:License>
            <xsl:copy-of select="@*"/>
            <xsl:apply-templates select="node()"/>
            <xsl:copy-of select="@*" />
            <xsl:apply-templates select="node()" />
        </cc:License>
    </xsl:template>

    <!--
        Identity transform
    -->
    <!-- Identity transform -->
    <xsl:template match="node()|@*">
        <xsl:copy>
            <xsl:apply-templates select="node()|@*"/>
            <xsl:apply-templates select="node()|@*" />
        </xsl:copy>
    </xsl:template>

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

------------------------------------------------------
-- DS-3097 introduced new action id for WITHDRAWN_READ
------------------------------------------------------

UPDATE resourcepolicy SET action_id = 12 WHERE action_id = 0 AND resource_type_id = 0 AND resource_id IN (
    SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
        LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
        LEFT JOIN item ON item2bundle.item_id = item.item_id
    WHERE item.withdrawn = 1
);

UPDATE resourcepolicy SET action_id = 12 WHERE action_id = 0 AND resource_type_id = 1 AND resource_id IN (
    SELECT item2bundle.bundle_id FROM item2bundle
        LEFT JOIN item ON item2bundle.item_id = item.item_id
    WHERE item.withdrawn = 1
);

@@ -0,0 +1,16 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

------------------------------------------------------
-- DS-3563 Missing database index on metadatavalue.resource_type_id
------------------------------------------------------
-- Create an index on the metadata value resource_type_id column so that it can be searched efficiently.

DROP INDEX IF EXISTS metadatavalue_resource_type_id_idx;

CREATE INDEX metadatavalue_resource_type_id_idx ON metadatavalue (resource_type_id);

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

------------------------------------------------------
-- DS-3097 introduced new action id for WITHDRAWN_READ
------------------------------------------------------

UPDATE resourcepolicy SET action_id = 12 WHERE action_id = 0 AND resource_type_id = 0 AND resource_id IN (
    SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
        LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
        LEFT JOIN item ON item2bundle.item_id = item.item_id
    WHERE item.withdrawn = 1
);

UPDATE resourcepolicy SET action_id = 12 WHERE action_id = 0 AND resource_type_id = 1 AND resource_id IN (
    SELECT item2bundle.bundle_id FROM item2bundle
        LEFT JOIN item ON item2bundle.item_id = item.item_id
    WHERE item.withdrawn = 1
);

@@ -0,0 +1,23 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

------------------------------------------------------
-- DS-3563 Missing database index on metadatavalue.resource_type_id
------------------------------------------------------
-- Create an index on the metadata value resource_type_id column so that it can be searched efficiently.
declare
    index_not_exists EXCEPTION;
    PRAGMA EXCEPTION_INIT(index_not_exists, -1418);
begin

    execute immediate 'DROP INDEX metadatavalue_type_id_idx';
exception
    when index_not_exists then null;
end;
/
CREATE INDEX metadatavalue_type_id_idx ON metadatavalue (resource_type_id);

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

------------------------------------------------------
-- DS-3097 introduced new action id for WITHDRAWN_READ
------------------------------------------------------

UPDATE resourcepolicy SET action_id = 12 WHERE action_id = 0 AND resource_type_id = 0 AND resource_id IN (
    SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream
        LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id
        LEFT JOIN item ON item2bundle.item_id = item.item_id
    WHERE item.withdrawn = true
);

UPDATE resourcepolicy SET action_id = 12 WHERE action_id = 0 AND resource_type_id = 1 AND resource_id IN (
    SELECT item2bundle.bundle_id FROM item2bundle
        LEFT JOIN item ON item2bundle.item_id = item.item_id
    WHERE item.withdrawn = true
);

@@ -0,0 +1,16 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

------------------------------------------------------
-- DS-3563 Missing database index on metadatavalue.resource_type_id
------------------------------------------------------
-- Create an index on the metadata value resource_type_id column so that it can be searched efficiently.

DROP INDEX IF EXISTS metadatavalue_resource_type_id_idx;

CREATE INDEX metadatavalue_resource_type_id_idx ON metadatavalue (resource_type_id);

@@ -0,0 +1,15 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

-------------------------------------------------------------------------
-- DS-3431 Workflow system is vulnerable to unauthorized manipulations --
-------------------------------------------------------------------------

-- H2 is used only for testing. In test, the database is always
-- empty to start with, so there is nothing to migrate here.

@@ -0,0 +1,503 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

-------------------------------------------------------------------------
-- DS-3431 Workflow system is vulnerable to unauthorized manipulations --
-------------------------------------------------------------------------

-----------------------------------------------------------------------
-- grant claiming permissions to all workflow step groups (step 1-3) --
-----------------------------------------------------------------------
INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '3' AS resource_type_id,
    '5' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    workflow_step_1 AS epersongroup_id,
    collection_id AS dspace_object
FROM collection
WHERE workflow_step_1 IS NOT NULL
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 AND resource_id = collection_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '3' AS resource_type_id,
    '6' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    workflow_step_2 AS epersongroup_id,
    collection_id AS dspace_object
FROM collection
WHERE workflow_step_2 IS NOT NULL
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 AND resource_id = collection_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '3' AS resource_type_id,
    '7' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    workflow_step_3 AS epersongroup_id,
    collection_id AS dspace_object
FROM collection
WHERE workflow_step_3 IS NOT NULL
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 AND resource_id = collection_id
    );

-----------------------------------------------------------------------
-- grant add permissions to all workflow step groups (step 1-3)      --
-----------------------------------------------------------------------
INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '3' AS resource_type_id,
    '3' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    workflow_step_1 AS epersongroup_id,
    collection_id AS dspace_object
FROM collection
WHERE workflow_step_1 IS NOT NULL
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 AND resource_id = collection_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '3' AS resource_type_id,
    '3' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    workflow_step_2 AS epersongroup_id,
    collection_id AS dspace_object
FROM collection
WHERE workflow_step_2 IS NOT NULL
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 AND resource_id = collection_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '3' AS resource_type_id,
    '3' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    workflow_step_3 AS epersongroup_id,
    collection_id AS dspace_object
FROM collection
WHERE workflow_step_3 IS NOT NULL
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 AND resource_id = collection_id
    );

----------------------------------------------------------------------------------
-- grant read/write/delete/add/remove permission on workflow items to reviewers --
----------------------------------------------------------------------------------
INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '2' AS resource_type_id,
    '0' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    owner AS eperson_id,
    item_id AS dspace_object
FROM workflowitem
WHERE
    owner IS NOT NULL
    AND (state = 2 OR state = 4 OR state = 6)
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '2' AS resource_type_id,
    '1' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    owner AS eperson_id,
    item_id AS dspace_object
FROM workflowitem
WHERE
    owner IS NOT NULL
    AND (state = 2 OR state = 4 OR state = 6)
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '2' AS resource_type_id,
    '2' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    owner AS eperson_id,
    item_id AS dspace_object
FROM workflowitem
WHERE
    owner IS NOT NULL
    AND (state = 2 OR state = 4 OR state = 6)
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '2' AS resource_type_id,
    '3' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    owner AS eperson_id,
    item_id AS dspace_object
FROM workflowitem
WHERE
    owner IS NOT NULL
    AND (state = 2 OR state = 4 OR state = 6)
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id = item_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '2' AS resource_type_id,
    '4' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    owner AS eperson_id,
    item_id AS dspace_object
FROM workflowitem
WHERE
    owner IS NOT NULL
    AND (state = 2 OR state = 4 OR state = 6)
    AND NOT EXISTS (
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id
    );

-----------------------------------------------------------------------------------
-- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers --
-----------------------------------------------------------------------------------
INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '1' AS resource_type_id,
    '0' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    i2b.bundle_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '1' AS resource_type_id,
    '1' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    i2b.bundle_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '1' AS resource_type_id,
    '2' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    i2b.bundle_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '1' AS resource_type_id,
    '3' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    i2b.bundle_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '1' AS resource_type_id,
    '4' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    i2b.bundle_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
    );

-------------------------------------------------------------------------------
-- grant read/write/delete/add/remove permission on all Bitstreams of Bundle --
-- ORIGINAL to reviewers                                                     --
-------------------------------------------------------------------------------
INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '0' AS resource_type_id,
    '0' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    b2b.bitstream_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN bundle2bitstream b2b
        ON b2b.bundle_id = i2b.bundle_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '0' AS resource_type_id,
    '1' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    b2b.bitstream_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN bundle2bitstream b2b
        ON b2b.bundle_id = i2b.bundle_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '0' AS resource_type_id,
    '2' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    b2b.bitstream_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN bundle2bitstream b2b
        ON b2b.bundle_id = i2b.bundle_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '0' AS resource_type_id,
    '3' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    b2b.bitstream_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN bundle2bitstream b2b
        ON b2b.bundle_id = i2b.bundle_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
    );

INSERT INTO resourcepolicy
    (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
    resourcepolicy_seq.NEXTVAL AS policy_id,
    '0' AS resource_type_id,
    '4' AS action_id,
    'TYPE_WORKFLOW' AS rptype,
    wfi.owner AS eperson_id,
    b2b.bitstream_id AS dspace_object
FROM workflowitem wfi
    JOIN item2bundle i2b
        ON i2b.item_id = wfi.item_id
    JOIN bundle2bitstream b2b
        ON b2b.bundle_id = i2b.bundle_id
    JOIN metadatavalue mv
        ON mv.resource_id = i2b.bundle_id
    JOIN metadatafieldregistry mfr
        ON mv.metadata_field_id = mfr.metadata_field_id
    JOIN metadataschemaregistry msr
        ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
    msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
    AND mfr.element = 'title'
    AND mfr.qualifier IS NULL
    AND mv.text_value LIKE 'ORIGINAL'
    AND wfi.owner IS NOT NULL
    AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
    AND NOT EXISTS(
        SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
    );

@@ -0,0 +1,503 @@
|
||||
--
|
||||
-- The contents of this file are subject to the license and copyright
|
||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
||||
-- tree and available online at
|
||||
--
|
||||
-- http://www.dspace.org/license/
|
||||
--
|
||||
|
||||
-------------------------------------------------------------------------
|
||||
-- DS-3431 Workflow system is vulnerable to unauthorized manipulations --
|
||||
-------------------------------------------------------------------------
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
-- grant claiming permissions to all workflow step groups (step 1-3) --
|
||||
-----------------------------------------------------------------------
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'3' AS resource_type_id,
|
||||
'5' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
workflow_step_1 AS epersongroup_id,
|
||||
collection_id
|
||||
FROM collection
|
||||
WHERE workflow_step_1 IS NOT NULL
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and resource_id = collection_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'3' AS resource_type_id,
|
||||
'6' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
workflow_step_2 AS epersongroup_id,
|
||||
collection_id
|
||||
FROM collection
|
||||
WHERE workflow_step_2 IS NOT NULL
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and resource_id = collection_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'3' AS resource_type_id,
|
||||
'7' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
workflow_step_3 AS epersongroup_id,
|
||||
collection_id
|
||||
FROM collection
|
||||
WHERE workflow_step_3 IS NOT NULL
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and resource_id = collection_id
|
||||
);
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
-- grant add permissions to all workflow step groups (step 1-3) --
|
||||
-----------------------------------------------------------------------
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'3' AS resource_type_id,
|
||||
'3' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
workflow_step_1 AS epersongroup_id,
|
||||
collection_id
|
||||
FROM collection
|
||||
WHERE workflow_step_1 IS NOT NULL
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and resource_id = collection_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'3' AS resource_type_id,
|
||||
'3' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
workflow_step_2 AS epersongroup_id,
|
||||
collection_id
|
||||
FROM collection
|
||||
WHERE workflow_step_2 IS NOT NULL
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and resource_id = collection_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'3' AS resource_type_id,
|
||||
'3' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
workflow_step_3 AS epersongroup_id,
|
||||
collection_id
|
||||
FROM collection
|
||||
WHERE workflow_step_3 IS NOT NULL
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and resource_id = collection_id
|
||||
);
|
||||
|
||||
----------------------------------------------------------------------------------
|
||||
-- grant read/write/delete/add/remove permission on workflow items to reviewers --
|
||||
----------------------------------------------------------------------------------
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'2' AS resource_type_id,
|
||||
'0' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
owner AS eperson_id,
|
||||
item_id
|
||||
FROM workflowitem
|
||||
WHERE
|
||||
owner IS NOT NULL
|
||||
AND (state = 2 OR state = 4 OR state = 6)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'2' AS resource_type_id,
|
||||
'1' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
owner AS eperson_id,
|
||||
item_id
|
||||
FROM workflowitem
|
||||
WHERE
|
||||
owner IS NOT NULL
|
||||
AND (state = 2 OR state = 4 OR state = 6)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'2' AS resource_type_id,
|
||||
'2' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
owner AS eperson_id,
|
||||
item_id
|
||||
FROM workflowitem
|
||||
WHERE
|
||||
owner IS NOT NULL
|
||||
AND (state = 2 OR state = 4 OR state = 6)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'2' AS resource_type_id,
|
||||
'3' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
owner AS eperson_id,
|
||||
item_id
|
||||
FROM workflowitem
|
||||
WHERE
|
||||
owner IS NOT NULL
|
||||
AND (state = 2 OR state = 4 OR state = 6)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id = item_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'2' AS resource_type_id,
|
||||
'4' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
owner AS eperson_id,
|
||||
item_id
|
||||
FROM workflowitem
|
||||
WHERE
|
||||
owner IS NOT NULL
|
||||
AND (state = 2 OR state = 4 OR state = 6)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id
|
||||
);
|
||||
|
||||
-----------------------------------------------------------------------------------
|
||||
-- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers --
|
||||
-----------------------------------------------------------------------------------
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'1' AS resource_type_id,
|
||||
'0' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
wfi.owner AS eperson_id,
|
||||
i2b.bundle_id AS dspace_object
|
||||
FROM workflowitem AS wfi
|
||||
JOIN item2bundle AS i2b
|
||||
ON i2b.item_id = wfi.item_id
|
||||
JOIN metadatavalue AS mv
|
||||
ON mv.resource_id = i2b.bundle_id
|
||||
JOIN metadatafieldregistry as mfr
|
||||
ON mv.metadata_field_id = mfr.metadata_field_id
|
||||
JOIN metadataschemaregistry as msr
|
||||
ON mfr.metadata_schema_id = msr.metadata_schema_id
|
||||
WHERE
|
||||
msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
|
||||
AND mfr.element = 'title'
|
||||
AND mfr.qualifier IS NULL
|
||||
AND mv.text_value = 'ORIGINAL'
|
||||
AND wfi.owner IS NOT NULL
|
||||
AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
|
||||
AND NOT EXISTS(
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'1' AS resource_type_id,
|
||||
'1' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
wfi.owner AS eperson_id,
|
||||
i2b.bundle_id AS dspace_object
|
||||
FROM workflowitem AS wfi
|
||||
JOIN item2bundle AS i2b
|
||||
ON i2b.item_id = wfi.item_id
|
||||
JOIN metadatavalue AS mv
|
||||
ON mv.resource_id = i2b.bundle_id
|
||||
JOIN metadatafieldregistry as mfr
|
||||
ON mv.metadata_field_id = mfr.metadata_field_id
|
||||
JOIN metadataschemaregistry as msr
|
||||
ON mfr.metadata_schema_id = msr.metadata_schema_id
|
||||
WHERE
|
||||
msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
|
||||
AND mfr.element = 'title'
|
||||
AND mfr.qualifier IS NULL
|
||||
AND mv.text_value = 'ORIGINAL'
|
||||
AND wfi.owner IS NOT NULL
|
||||
AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
|
||||
AND NOT EXISTS(
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'1' AS resource_type_id,
|
||||
'2' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
wfi.owner AS eperson_id,
|
||||
i2b.bundle_id AS dspace_object
|
||||
FROM workflowitem AS wfi
|
||||
JOIN item2bundle AS i2b
|
||||
ON i2b.item_id = wfi.item_id
|
||||
JOIN metadatavalue AS mv
|
||||
ON mv.resource_id = i2b.bundle_id
|
||||
JOIN metadatafieldregistry as mfr
|
||||
ON mv.metadata_field_id = mfr.metadata_field_id
|
||||
JOIN metadataschemaregistry as msr
|
||||
ON mfr.metadata_schema_id = msr.metadata_schema_id
|
||||
WHERE
|
||||
msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
|
||||
AND mfr.element = 'title'
|
||||
AND mfr.qualifier IS NULL
|
||||
AND mv.text_value = 'ORIGINAL'
|
||||
AND wfi.owner IS NOT NULL
|
||||
AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
|
||||
AND NOT EXISTS(
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'1' AS resource_type_id,
|
||||
'3' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
wfi.owner AS eperson_id,
|
||||
i2b.bundle_id AS dspace_object
|
||||
FROM workflowitem AS wfi
|
||||
JOIN item2bundle AS i2b
|
||||
ON i2b.item_id = wfi.item_id
|
||||
JOIN metadatavalue AS mv
|
||||
ON mv.resource_id = i2b.bundle_id
|
||||
JOIN metadatafieldregistry as mfr
|
||||
ON mv.metadata_field_id = mfr.metadata_field_id
|
||||
JOIN metadataschemaregistry as msr
|
||||
ON mfr.metadata_schema_id = msr.metadata_schema_id
|
||||
WHERE
|
||||
msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
|
||||
AND mfr.element = 'title'
|
||||
AND mfr.qualifier IS NULL
|
||||
AND mv.text_value = 'ORIGINAL'
|
||||
AND wfi.owner IS NOT NULL
|
||||
AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
|
||||
AND NOT EXISTS(
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
|
||||
);
|
||||
|
||||
INSERT INTO resourcepolicy
|
||||
(policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
|
||||
SELECT
|
||||
nextval('resourcepolicy_seq') AS policy_id,
|
||||
'1' AS resource_type_id,
|
||||
'4' AS action_id,
|
||||
'TYPE_WORKFLOW' AS rptype,
|
||||
wfi.owner AS eperson_id,
|
||||
i2b.bundle_id AS dspace_object
|
||||
FROM workflowitem AS wfi
|
||||
JOIN item2bundle AS i2b
|
||||
ON i2b.item_id = wfi.item_id
|
||||
JOIN metadatavalue AS mv
|
||||
ON mv.resource_id = i2b.bundle_id
|
||||
JOIN metadatafieldregistry as mfr
|
||||
ON mv.metadata_field_id = mfr.metadata_field_id
|
||||
JOIN metadataschemaregistry as msr
|
||||
ON mfr.metadata_schema_id = msr.metadata_schema_id
|
||||
WHERE
|
||||
msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
|
||||
AND mfr.element = 'title'
|
||||
AND mfr.qualifier IS NULL
|
||||
AND mv.text_value = 'ORIGINAL'
|
||||
AND wfi.owner IS NOT NULL
|
||||
AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
|
||||
AND NOT EXISTS(
|
||||
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id
);

-------------------------------------------------------------------------------
-- grant read/write/delete/add/remove permission on all Bitstreams of Bundle --
-- ORIGINAL to reviewers                                                     --
-------------------------------------------------------------------------------
INSERT INTO resourcepolicy
  (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
  nextval('resourcepolicy_seq') AS policy_id,
  '0' AS resource_type_id,
  '0' AS action_id,
  'TYPE_WORKFLOW' AS rptype,
  wfi.owner AS eperson_id,
  b2b.bitstream_id AS resource_id
FROM workflowitem AS wfi
  JOIN item2bundle AS i2b
    ON i2b.item_id = wfi.item_id
  JOIN bundle2bitstream AS b2b
    ON b2b.bundle_id = i2b.bundle_id
  JOIN metadatavalue AS mv
    ON mv.resource_id = i2b.bundle_id
  JOIN metadatafieldregistry AS mfr
    ON mv.metadata_field_id = mfr.metadata_field_id
  JOIN metadataschemaregistry AS msr
    ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
  msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
  AND mfr.element = 'title'
  AND mfr.qualifier IS NULL
  AND mv.text_value = 'ORIGINAL'
  AND wfi.owner IS NOT NULL
  AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
  AND NOT EXISTS(
    SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
  );

INSERT INTO resourcepolicy
  (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
  nextval('resourcepolicy_seq') AS policy_id,
  '0' AS resource_type_id,
  '1' AS action_id,
  'TYPE_WORKFLOW' AS rptype,
  wfi.owner AS eperson_id,
  b2b.bitstream_id AS resource_id
FROM workflowitem AS wfi
  JOIN item2bundle AS i2b
    ON i2b.item_id = wfi.item_id
  JOIN bundle2bitstream AS b2b
    ON b2b.bundle_id = i2b.bundle_id
  JOIN metadatavalue AS mv
    ON mv.resource_id = i2b.bundle_id
  JOIN metadatafieldregistry AS mfr
    ON mv.metadata_field_id = mfr.metadata_field_id
  JOIN metadataschemaregistry AS msr
    ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
  msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
  AND mfr.element = 'title'
  AND mfr.qualifier IS NULL
  AND mv.text_value = 'ORIGINAL'
  AND wfi.owner IS NOT NULL
  AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
  AND NOT EXISTS(
    SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
  );

INSERT INTO resourcepolicy
  (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
  nextval('resourcepolicy_seq') AS policy_id,
  '0' AS resource_type_id,
  '2' AS action_id,
  'TYPE_WORKFLOW' AS rptype,
  wfi.owner AS eperson_id,
  b2b.bitstream_id AS resource_id
FROM workflowitem AS wfi
  JOIN item2bundle AS i2b
    ON i2b.item_id = wfi.item_id
  JOIN bundle2bitstream AS b2b
    ON b2b.bundle_id = i2b.bundle_id
  JOIN metadatavalue AS mv
    ON mv.resource_id = i2b.bundle_id
  JOIN metadatafieldregistry AS mfr
    ON mv.metadata_field_id = mfr.metadata_field_id
  JOIN metadataschemaregistry AS msr
    ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
  msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
  AND mfr.element = 'title'
  AND mfr.qualifier IS NULL
  AND mv.text_value = 'ORIGINAL'
  AND wfi.owner IS NOT NULL
  AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
  AND NOT EXISTS(
    SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
  );

INSERT INTO resourcepolicy
  (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
  nextval('resourcepolicy_seq') AS policy_id,
  '0' AS resource_type_id,
  '3' AS action_id,
  'TYPE_WORKFLOW' AS rptype,
  wfi.owner AS eperson_id,
  b2b.bitstream_id AS resource_id
FROM workflowitem AS wfi
  JOIN item2bundle AS i2b
    ON i2b.item_id = wfi.item_id
  JOIN bundle2bitstream AS b2b
    ON b2b.bundle_id = i2b.bundle_id
  JOIN metadatavalue AS mv
    ON mv.resource_id = i2b.bundle_id
  JOIN metadatafieldregistry AS mfr
    ON mv.metadata_field_id = mfr.metadata_field_id
  JOIN metadataschemaregistry AS msr
    ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
  msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
  AND mfr.element = 'title'
  AND mfr.qualifier IS NULL
  AND mv.text_value = 'ORIGINAL'
  AND wfi.owner IS NOT NULL
  AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
  AND NOT EXISTS(
    SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
  );

INSERT INTO resourcepolicy
  (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id)
SELECT
  nextval('resourcepolicy_seq') AS policy_id,
  '0' AS resource_type_id,
  '4' AS action_id,
  'TYPE_WORKFLOW' AS rptype,
  wfi.owner AS eperson_id,
  b2b.bitstream_id AS resource_id
FROM workflowitem AS wfi
  JOIN item2bundle AS i2b
    ON i2b.item_id = wfi.item_id
  JOIN bundle2bitstream AS b2b
    ON b2b.bundle_id = i2b.bundle_id
  JOIN metadatavalue AS mv
    ON mv.resource_id = i2b.bundle_id
  JOIN metadatafieldregistry AS mfr
    ON mv.metadata_field_id = mfr.metadata_field_id
  JOIN metadataschemaregistry AS msr
    ON mfr.metadata_schema_id = msr.metadata_schema_id
WHERE
  msr.namespace = 'http://dublincore.org/documents/dcmi-terms/'
  AND mfr.element = 'title'
  AND mfr.qualifier IS NULL
  AND mv.text_value = 'ORIGINAL'
  AND wfi.owner IS NOT NULL
  AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6)
  AND NOT EXISTS(
    SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id
  );
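
-- The five statements above give the workflow owner READ, WRITE, DELETE, ADD
-- and REMOVE rights (action_id 0-4 in DSpace's Constants) on every bitstream
-- in the ORIGINAL bundle of an in-progress workflow item; the NOT EXISTS
-- guards keep the migration safe to re-run. As a rough sanity check (a
-- sketch, assuming a PostgreSQL shell), one might count the policies created:
--
--   SELECT action_id, COUNT(*)
--     FROM resourcepolicy
--    WHERE rptype = 'TYPE_WORKFLOW'
--      AND resource_type_id = 0
--    GROUP BY action_id;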
406
dspace-api/src/test/data/dspaceFolder/config/input-forms.xml
Normal file
@@ -0,0 +1,406 @@
<?xml version="1.0"?>
<!DOCTYPE input-forms SYSTEM "input-forms.dtd">

<input-forms>

  <!-- The form-map maps collection handles to forms. DSpace does not       -->
  <!-- require a collection name to be unique, not even within a community. -->
  <!-- DSpace does however ensure that each collection's handle is unique.  -->
  <!-- Form-map provides the means to associate a unique collection name    -->
  <!-- with a form. The form-map also provides the special handle "default" -->
  <!-- (which is never a collection), here mapped to "traditional". Any     -->
  <!-- collection which does not appear in this map will be associated with -->
  <!-- the mapping for handle "default".                                    -->

  <form-map>
    <name-map collection-handle="default" form-name="traditional" />
  </form-map>
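
  <!-- A minimal illustrative entry (hypothetical handle): a real site      -->
  <!-- would add one name-map per collection that needs a non-default form: -->
  <!--   <name-map collection-handle="123456789/2" form-name="one" />       -->
  <!-- where form-name matches a form defined in form-definitions below.    -->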

  <!-- The form-definitions map lays out the detailed definition of all the -->
  <!-- submission forms. Each separate form set has a unique name as an     -->
  <!-- attribute. This name matches one of the names in the form-map. One   -->
  <!-- named form set has the name "traditional"; as this name suggests,    -->
  <!-- it is the old style and is also the default, which gets used when    -->
  <!-- the specified collection has no correspondingly-named form set.      -->
  <!--                                                                      -->
  <!-- Each form set contains an ordered set of pages; each page defines    -->
  <!-- one submission metadata entry screen. Each page has an ordered list  -->
  <!-- of field definitions. Each field definition corresponds to one       -->
  <!-- metadata entry (a so-called row), which has a DC element name, a     -->
  <!-- displayed label, a text string prompt which is called a hint, and    -->
  <!-- an input-type. Each field also may hold optional elements: DC        -->
  <!-- qualifier name, a repeatable flag, and a text string whose presence  -->
  <!-- serves as a 'this field is required' flag.                           -->

  <form-definitions>

    <form name="traditional">
      <page number="1">
        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>contributor</dc-element>
          <dc-qualifier>author</dc-qualifier>
          <repeatable>true</repeatable>
          <label>Authors</label>
          <input-type>name</input-type>
          <hint>Enter the names of the authors of this item.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>title</dc-element>
          <dc-qualifier></dc-qualifier>
          <repeatable>false</repeatable>
          <label>Title</label>
          <input-type>onebox</input-type>
          <hint>Enter the main title of the item.</hint>
          <required>You must enter a main title for this item.</required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>title</dc-element>
          <dc-qualifier>alternative</dc-qualifier>
          <repeatable>true</repeatable>
          <label>Other Titles</label>
          <input-type>onebox</input-type>
          <hint>If the item has any alternative titles, please enter them here.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>date</dc-element>
          <dc-qualifier>issued</dc-qualifier>
          <repeatable>false</repeatable>
          <label>Date of Issue</label>
          <input-type>date</input-type>
          <hint>Please give the date of previous publication or public distribution.
                You can leave out the day and/or month if they aren't
                applicable.</hint>
          <required>You must enter at least the year.</required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>publisher</dc-element>
          <dc-qualifier></dc-qualifier>
          <repeatable>false</repeatable>
          <label>Publisher</label>
          <input-type>onebox</input-type>
          <hint>Enter the name of the publisher of the previously issued instance of this item.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>identifier</dc-element>
          <dc-qualifier>citation</dc-qualifier>
          <repeatable>false</repeatable>
          <label>Citation</label>
          <input-type>onebox</input-type>
          <hint>Enter the standard citation for the previously issued instance of this item.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>relation</dc-element>
          <dc-qualifier>ispartofseries</dc-qualifier>
          <repeatable>true</repeatable>
          <label>Series/Report No.</label>
          <input-type>series</input-type>
          <hint>Enter the series and number assigned to this item by your community.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>identifier</dc-element>
          <dc-qualifier></dc-qualifier>
          <!-- An input-type of qualdrop_value MUST be marked as repeatable -->
          <repeatable>true</repeatable>
          <label>Identifiers</label>
          <input-type value-pairs-name="common_identifiers">qualdrop_value</input-type>
          <hint>If the item has any identification numbers or codes associated with
                it, please enter the types and the actual numbers or codes.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>type</dc-element>
          <dc-qualifier></dc-qualifier>
          <repeatable>true</repeatable>
          <label>Type</label>
          <input-type value-pairs-name="common_types">dropdown</input-type>
          <hint>Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>language</dc-element>
          <dc-qualifier>iso</dc-qualifier>
          <repeatable>false</repeatable>
          <label>Language</label>
          <input-type value-pairs-name="common_iso_languages">dropdown</input-type>
          <hint>Select the language of the main content of the item. If the language does not appear in the list, please select 'Other'. If the content does not really have a language (for example, if it is a dataset or an image) please select 'N/A'.</hint>
          <required></required>
        </field>
      </page>

      <page number="2">
        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>subject</dc-element>
          <dc-qualifier></dc-qualifier>
          <!-- An input-type of twobox MUST be marked as repeatable -->
          <repeatable>true</repeatable>
          <label>Subject Keywords</label>
          <input-type>twobox</input-type>
          <hint>Enter appropriate subject keywords or phrases.</hint>
          <required></required>
          <vocabulary>srsc</vocabulary>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>description</dc-element>
          <dc-qualifier>abstract</dc-qualifier>
          <repeatable>false</repeatable>
          <label>Abstract</label>
          <input-type>textarea</input-type>
          <hint>Enter the abstract of the item.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>description</dc-element>
          <dc-qualifier>sponsorship</dc-qualifier>
          <repeatable>false</repeatable>
          <label>Sponsors</label>
          <input-type>textarea</input-type>
          <hint>Enter the names of any sponsors and/or funding codes in the box.</hint>
          <required></required>
        </field>

        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>description</dc-element>
          <dc-qualifier></dc-qualifier>
          <repeatable>false</repeatable>
          <label>Description</label>
          <input-type>textarea</input-type>
          <hint>Enter any other description or comments in this box.</hint>
          <required></required>
        </field>
      </page>
    </form>

    <form name="one">
      <page number="1">
        <field>
          <dc-schema>dc</dc-schema>
          <dc-element>contributor</dc-element>
          <dc-qualifier>author</dc-qualifier>
          <repeatable>true</repeatable>
          <label>One: Authors</label>
          <input-type>name</input-type>
          <hint>Enter the names of the authors of this item.</hint>
          <required></required>
        </field>
      </page>
    </form>
  </form-definitions>

  <!-- form-value-pairs populate dropdown and qualdrop-value lists.          -->
  <!-- The form-value-pairs element holds child elements named 'value-pairs' -->
  <!-- A 'value-pairs' element has a value-pairs-name and a dc-term          -->
  <!-- attribute. The dc-term attribute specifies to which Dublin Core Term  -->
  <!-- this set of value-pairs applies.                                      -->
  <!-- Current dc-term values are: identifier, type, and language_iso. The   -->
  <!-- value-pairs-name attribute matches the value-pairs-name referenced    -->
  <!-- by an input-type in the form definitions, above.                      -->
  <!-- A value-pair contains one 'pair' for each value displayed in the list -->
  <!-- Each pair contains a 'displayed-value' element and a 'stored-value'   -->
  <!-- element. A UI list displays the displayed-values, but the program     -->
  <!-- stores the associated stored-values in the database.                  -->

  <form-value-pairs>
    <value-pairs value-pairs-name="common_identifiers" dc-term="identifier">
      <pair>
        <displayed-value>ISSN</displayed-value>
        <stored-value>issn</stored-value>
      </pair>
      <pair>
        <displayed-value>Other</displayed-value>
        <stored-value>other</stored-value>
      </pair>
      <pair>
        <displayed-value>ISMN</displayed-value>
        <stored-value>ismn</stored-value>
      </pair>
      <pair>
        <displayed-value>Gov't Doc #</displayed-value>
        <stored-value>govdoc</stored-value>
      </pair>
      <pair>
        <displayed-value>URI</displayed-value>
        <stored-value>uri</stored-value>
      </pair>
      <pair>
        <displayed-value>ISBN</displayed-value>
        <stored-value>isbn</stored-value>
      </pair>
    </value-pairs>

    <value-pairs value-pairs-name="common_types" dc-term="type">
      <pair>
        <displayed-value>Animation</displayed-value>
        <stored-value>Animation</stored-value>
      </pair>
      <pair>
        <displayed-value>Article</displayed-value>
        <stored-value>Article</stored-value>
      </pair>
      <pair>
        <displayed-value>Book</displayed-value>
        <stored-value>Book</stored-value>
      </pair>
      <pair>
        <displayed-value>Book chapter</displayed-value>
        <stored-value>Book chapter</stored-value>
      </pair>
      <pair>
        <displayed-value>Dataset</displayed-value>
        <stored-value>Dataset</stored-value>
      </pair>
      <pair>
        <displayed-value>Learning Object</displayed-value>
        <stored-value>Learning Object</stored-value>
      </pair>
      <pair>
        <displayed-value>Image</displayed-value>
        <stored-value>Image</stored-value>
      </pair>
      <pair>
        <displayed-value>Image, 3-D</displayed-value>
        <stored-value>Image, 3-D</stored-value>
      </pair>
      <pair>
        <displayed-value>Map</displayed-value>
        <stored-value>Map</stored-value>
      </pair>
      <pair>
        <displayed-value>Musical Score</displayed-value>
        <stored-value>Musical Score</stored-value>
      </pair>
      <pair>
        <displayed-value>Plan or blueprint</displayed-value>
        <stored-value>Plan or blueprint</stored-value>
      </pair>
      <pair>
        <displayed-value>Preprint</displayed-value>
        <stored-value>Preprint</stored-value>
      </pair>
      <pair>
        <displayed-value>Presentation</displayed-value>
        <stored-value>Presentation</stored-value>
      </pair>
      <pair>
        <displayed-value>Recording, acoustical</displayed-value>
        <stored-value>Recording, acoustical</stored-value>
      </pair>
      <pair>
        <displayed-value>Recording, musical</displayed-value>
        <stored-value>Recording, musical</stored-value>
      </pair>
      <pair>
        <displayed-value>Recording, oral</displayed-value>
        <stored-value>Recording, oral</stored-value>
      </pair>
      <pair>
        <displayed-value>Software</displayed-value>
        <stored-value>Software</stored-value>
      </pair>
      <pair>
        <displayed-value>Technical Report</displayed-value>
        <stored-value>Technical Report</stored-value>
      </pair>
      <pair>
        <displayed-value>Thesis</displayed-value>
        <stored-value>Thesis</stored-value>
      </pair>
      <pair>
        <displayed-value>Video</displayed-value>
        <stored-value>Video</stored-value>
      </pair>
      <pair>
        <displayed-value>Working Paper</displayed-value>
        <stored-value>Working Paper</stored-value>
      </pair>
      <pair>
        <displayed-value>Other</displayed-value>
        <stored-value>Other</stored-value>
      </pair>
    </value-pairs>

    <!-- default language order: (from dspace 1.2.1)
         "en_US", "en", "es", "de", "fr", "it", "ja", "zh", "other", ""
    -->
    <value-pairs value-pairs-name="common_iso_languages" dc-term="language_iso">
      <pair>
        <displayed-value>N/A</displayed-value>
        <stored-value></stored-value>
      </pair>
      <pair>
        <displayed-value>English (United States)</displayed-value>
        <stored-value>en_US</stored-value>
      </pair>
      <pair>
        <displayed-value>English</displayed-value>
        <stored-value>en</stored-value>
      </pair>
      <pair>
        <displayed-value>Spanish</displayed-value>
        <stored-value>es</stored-value>
      </pair>
      <pair>
        <displayed-value>German</displayed-value>
        <stored-value>de</stored-value>
      </pair>
      <pair>
        <displayed-value>French</displayed-value>
        <stored-value>fr</stored-value>
      </pair>
      <pair>
        <displayed-value>Italian</displayed-value>
        <stored-value>it</stored-value>
      </pair>
      <pair>
        <displayed-value>Japanese</displayed-value>
        <stored-value>ja</stored-value>
      </pair>
      <pair>
        <displayed-value>Chinese</displayed-value>
        <stored-value>zh</stored-value>
      </pair>
      <pair>
        <displayed-value>Turkish</displayed-value>
        <stored-value>tr</stored-value>
      </pair>
      <pair>
        <displayed-value>(Other)</displayed-value>
        <stored-value>other</stored-value>
      </pair>
    </value-pairs>

  </form-value-pairs>

</input-forms>
@@ -96,7 +96,7 @@ public class DSpaceCSVTest extends AbstractUnitTest
        }
        catch (Exception e)
        {
            assertThat("testDSpaceCSV Bad heading CSV", e.getMessage(), equalTo("Unknown metadata element in row 4: dc.contributor.foobar"));
            assertThat("testDSpaceCSV Bad heading CSV", e.getMessage(), equalTo("Unknown metadata element in column 4: dc.contributor.foobar"));
        }
        lines = dcsv.getCSVLinesAsStringArray();
        assertThat("testDSpaceCSV Good CSV", lines.length, equalTo(7));
@@ -126,7 +126,7 @@ public class DSpaceCSVTest extends AbstractUnitTest
        }
        catch (Exception e)
        {
            assertThat("testDSpaceCSV Bad heading CSV", e.getMessage(), equalTo("Unknown metadata schema in row 3: dcdc.title"));
            assertThat("testDSpaceCSV Bad heading CSV", e.getMessage(), equalTo("Unknown metadata schema in column 3: dcdc.title"));
        }

        // Delete the test file
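
        // Note: both hunks track a reworded DSpaceCSV error message, which now
        // points at the offending CSV column rather than the "row". A heading
        // that would trigger the first message might look like this
        // (hypothetical CSV, shown only to illustrate the column numbering):
        //
        //     id,collection,dc.title,dc.contributor.foobar
        //                             ^-- column 4: unknown metadata element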
@@ -0,0 +1,34 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.content;

import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;

/**
 * Give tests access to package-private operations on Collection.
 * @author mwood
 */
public class CollectionHelper
{
    /**
     * Delete the Collection by calling {@link org.dspace.content.Collection#delete()}.
     *
     * @param collection to be deleted.
     * @throws SQLException passed through.
     * @throws AuthorizeException passed through.
     * @throws IOException passed through.
     */
    public static void delete(Collection collection)
            throws SQLException, AuthorizeException, IOException
    {
        collection.delete();
    }
}
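
// A minimal usage sketch (hypothetical test code, not part of this change):
// Collection#delete() is package-private, so a test in another package would
// delete a collection through this helper, e.g.
//
//     context.turnOffAuthorisationSystem();  // deletion needs admin rights
//     CollectionHelper.delete(collection);   // delegates to Collection#delete()
//     context.restoreAuthSystemState();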
@@ -476,6 +476,29 @@ public class CollectionTest extends AbstractDSpaceObjectTest
        assertThat("testSetWorkflowGroup 1", c.getWorkflowGroup(step), equalTo(g));
    }

    /**
     * Test of setWorkflowGroup method, of class Collection.
     * setWorkflowGroup adjusts the policies for the basic workflow. This test
     * ensures that no exception (e.g. ConcurrentModificationException) is
     * thrown during these adjustments.
     * @throws java.sql.SQLException passed through.
     * @throws org.dspace.authorize.AuthorizeException passed through.
     */
    @Test
    public void testChangeWorkflowGroup()
            throws SQLException, AuthorizeException
    {
        context.turnOffAuthorisationSystem(); // must be an Admin to create a Group
        int step = 1;
        Group g1 = Group.create(context);
        Group g2 = Group.create(context);
        context.restoreAuthSystemState();
        c.setWorkflowGroup(step, g1);
        c.setWorkflowGroup(step, g2);
        assertThat("testChangeWorkflowGroup 0", c.getWorkflowGroup(step), notNullValue());
        assertThat("testChangeWorkflowGroup 1", c.getWorkflowGroup(step), equalTo(g2));
    }

    /**
     * Test of getWorkflowGroup method, of class Collection.
     */