mirror of
https://github.com/DSpace/DSpace.git
synced 2025-10-07 10:04:21 +00:00
Compare commits
385 Commits
rest-tutor
...
dspace-5.9
Author | SHA1 | Date | |
---|---|---|---|
![]() |
be88702e4e | ||
![]() |
b5e0777575 | ||
![]() |
2b8a177c71 | ||
![]() |
ce6212bcfc | ||
![]() |
83f74f93a0 | ||
![]() |
f002a3e885 | ||
![]() |
8b849559bc | ||
![]() |
d00880481a | ||
![]() |
2a70460983 | ||
![]() |
2bf85d9e8d | ||
![]() |
8ebdc626db | ||
![]() |
f7509e95f0 | ||
![]() |
ac796e51ba | ||
![]() |
df2a6a59e5 | ||
![]() |
1008277c02 | ||
![]() |
1d91a47d23 | ||
![]() |
b8e289ae73 | ||
![]() |
3fe47c95a2 | ||
![]() |
41ed0511ec | ||
![]() |
e0f8da6671 | ||
![]() |
0a89ba8834 | ||
![]() |
8d81e825de | ||
![]() |
a0ea20bd18 | ||
![]() |
a392058032 | ||
![]() |
31a19e7084 | ||
![]() |
4d2cde0bfb | ||
![]() |
17b2d0a67e | ||
![]() |
9fc0ce0df7 | ||
![]() |
a16168edbe | ||
![]() |
b869a242d3 | ||
![]() |
246df7b265 | ||
![]() |
78e68f6cb7 | ||
![]() |
c01a12f3d8 | ||
![]() |
88e7b322c0 | ||
![]() |
682b4b0043 | ||
![]() |
418fd92a4c | ||
![]() |
4e30af2c0f | ||
![]() |
07b050cf7d | ||
![]() |
483d23ae82 | ||
![]() |
2d6fceed53 | ||
![]() |
4a5649174f | ||
![]() |
75e100d97e | ||
![]() |
577f3e31fe | ||
![]() |
ff1f01224f | ||
![]() |
52ec272ee6 | ||
![]() |
890b04689f | ||
![]() |
50eca14e9f | ||
![]() |
51b74510b9 | ||
![]() |
f96185dcea | ||
![]() |
98ac9ed3ce | ||
![]() |
50ac3b6819 | ||
![]() |
ec8e839ef5 | ||
![]() |
3832acc63e | ||
![]() |
12f978ecee | ||
![]() |
9981dfcacd | ||
![]() |
ec5056750f | ||
![]() |
266d016653 | ||
![]() |
4f0e0aec5e | ||
![]() |
f673b8da37 | ||
![]() |
e55212c14f | ||
![]() |
9401d971f6 | ||
![]() |
090b617c28 | ||
![]() |
504e2ae270 | ||
![]() |
99683cb810 | ||
![]() |
dab9bd40ed | ||
![]() |
7bbeea2633 | ||
![]() |
0182392563 | ||
![]() |
1c3673b37d | ||
![]() |
40d5f113a9 | ||
![]() |
97b22916f4 | ||
![]() |
711b4e8a96 | ||
![]() |
778f9dfec0 | ||
![]() |
15046de363 | ||
![]() |
ed8b31721f | ||
![]() |
9edd2cd218 | ||
![]() |
a7bed3a293 | ||
![]() |
d917b3158d | ||
![]() |
aef0f52a5b | ||
![]() |
09713ea4a8 | ||
![]() |
54f5cd87fc | ||
![]() |
c8f62e6f49 | ||
![]() |
bdf665b07e | ||
![]() |
8a790dedd3 | ||
![]() |
23aa21ae86 | ||
![]() |
9f531fb244 | ||
![]() |
5a1943cf22 | ||
![]() |
2a627d8bbd | ||
![]() |
632a55d894 | ||
![]() |
b434b999b7 | ||
![]() |
3963d3929e | ||
![]() |
f81cdf5283 | ||
![]() |
dd7502f758 | ||
![]() |
d557c019f2 | ||
![]() |
7467741624 | ||
![]() |
91a00e237c | ||
![]() |
f45252547d | ||
![]() |
d2c123d8c9 | ||
![]() |
943619248a | ||
![]() |
848aea9b27 | ||
![]() |
b8e784f8c2 | ||
![]() |
0b9d05154c | ||
![]() |
5a81ba0f3b | ||
![]() |
63ab1f13f8 | ||
![]() |
19d8144faa | ||
![]() |
7d53df0d6b | ||
![]() |
2bd6c2e392 | ||
![]() |
b0e624d72c | ||
![]() |
b37bd18c51 | ||
![]() |
51bb72da2a | ||
![]() |
3c8ecb5d1f | ||
![]() |
59302e9d6f | ||
![]() |
567ec083c8 | ||
![]() |
329f3b48a6 | ||
![]() |
a52779c571 | ||
![]() |
9a0334da7f | ||
![]() |
8e4db1344e | ||
![]() |
a9b8cca20f | ||
![]() |
93f368ff6b | ||
![]() |
fbc023019c | ||
![]() |
e00dc3d421 | ||
![]() |
77a4da32ec | ||
![]() |
79014ed943 | ||
![]() |
0dbaa81b54 | ||
![]() |
c36e6f9f02 | ||
![]() |
f7b6c83e99 | ||
![]() |
2510609f68 | ||
![]() |
03724151be | ||
![]() |
52db795b72 | ||
![]() |
5f3f552078 | ||
![]() |
39f4db91da | ||
![]() |
04ba49ba56 | ||
![]() |
1aa92f8d00 | ||
![]() |
85f2195396 | ||
![]() |
c5cdedb0c6 | ||
![]() |
b805aaf1dd | ||
![]() |
da315a4911 | ||
![]() |
ea4e3ee857 | ||
![]() |
1c4089c6b2 | ||
![]() |
e8a06006ae | ||
![]() |
9e0208fa96 | ||
![]() |
76d6dec743 | ||
![]() |
427ba190a6 | ||
![]() |
bdd4eb20dc | ||
![]() |
c7cbd44330 | ||
![]() |
50a4f046d4 | ||
![]() |
b5330b7815 | ||
![]() |
4fed285c83 | ||
![]() |
9390016397 | ||
![]() |
b3c7f0a7f1 | ||
![]() |
8da8431869 | ||
![]() |
2549e643f9 | ||
![]() |
ac0721767b | ||
![]() |
679c971ec3 | ||
![]() |
b50d35d3f3 | ||
![]() |
d6412e9af3 | ||
![]() |
067c1b1a95 | ||
![]() |
20026af124 | ||
![]() |
b3f9ea0eaa | ||
![]() |
987a16d23f | ||
![]() |
43d44aa0cc | ||
![]() |
307d577b35 | ||
![]() |
04c60ba939 | ||
![]() |
462360ed4d | ||
![]() |
c6fda557f7 | ||
![]() |
e73f83f7a4 | ||
![]() |
9f0f5940e7 | ||
![]() |
88ed833e2c | ||
![]() |
91d4081b03 | ||
![]() |
d9e986d669 | ||
![]() |
132f37a10a | ||
![]() |
98a26fa3e7 | ||
![]() |
4f5f5acdbe | ||
![]() |
212011cc75 | ||
![]() |
e7b49d8310 | ||
![]() |
a70f0bdd22 | ||
![]() |
a84763a258 | ||
![]() |
5a1028a7a9 | ||
![]() |
16b123e9df | ||
![]() |
f057ed8c07 | ||
![]() |
875bb59eb0 | ||
![]() |
2c09aea8fd | ||
![]() |
533245c8dd | ||
![]() |
875bba3add | ||
![]() |
55e623d1c2 | ||
![]() |
81a6d173ca | ||
![]() |
3ff604742b | ||
![]() |
3bfe7b8ea8 | ||
![]() |
ee62f9d6f0 | ||
![]() |
be35b0450b | ||
![]() |
8c94edc29c | ||
![]() |
2bf0275678 | ||
![]() |
86ca33eaa3 | ||
![]() |
f64d4b3367 | ||
![]() |
c908997900 | ||
![]() |
e2dd1089c9 | ||
![]() |
8809150e66 | ||
![]() |
1fd2723848 | ||
![]() |
454f40b3f4 | ||
![]() |
f05c9e794f | ||
![]() |
56fc41cac3 | ||
![]() |
0175e5edff | ||
![]() |
d17886c1cd | ||
![]() |
06668c363e | ||
![]() |
4b3a07120c | ||
![]() |
50c4a54bd6 | ||
![]() |
0aabf5d780 | ||
![]() |
04ce6ff2f4 | ||
![]() |
1f8f6241c2 | ||
![]() |
4a2f392ed8 | ||
![]() |
fac705ec3f | ||
![]() |
e1263249f5 | ||
![]() |
553b1a72c5 | ||
![]() |
6242865207 | ||
![]() |
59fa31641a | ||
![]() |
58344b610f | ||
![]() |
563d90f7c4 | ||
![]() |
131555604a | ||
![]() |
fbde108024 | ||
![]() |
2c59a9dd35 | ||
![]() |
d307c56d07 | ||
![]() |
1d2b954889 | ||
![]() |
69cfc61167 | ||
![]() |
b944ceb112 | ||
![]() |
9885ed851a | ||
![]() |
52ce1eb52b | ||
![]() |
deeef45943 | ||
![]() |
ad21875ac8 | ||
![]() |
4ee79a3d89 | ||
![]() |
c01c3af153 | ||
![]() |
f493a475fd | ||
![]() |
a3a5f562c9 | ||
![]() |
3479b0a254 | ||
![]() |
39289b6762 | ||
![]() |
edf7ea6524 | ||
![]() |
2045fee8ab | ||
![]() |
bac9beaffa | ||
![]() |
569ad5f546 | ||
![]() |
b465f26646 | ||
![]() |
ad19c3aeb6 | ||
![]() |
34c20d49ad | ||
![]() |
eaa08adb62 | ||
![]() |
15f3c247bc | ||
![]() |
2a44765f39 | ||
![]() |
87c34f1f1c | ||
![]() |
fce84880bc | ||
![]() |
3f94c3acb4 | ||
![]() |
50cb865ea2 | ||
![]() |
a9b8d8bfbc | ||
![]() |
600f680cd6 | ||
![]() |
01d7d060d7 | ||
![]() |
4a6663c2f4 | ||
![]() |
b3c87b2be7 | ||
![]() |
ac08b6a4e3 | ||
![]() |
a2f5fe34eb | ||
![]() |
ace19199e5 | ||
![]() |
6d9fa26535 | ||
![]() |
3efe549774 | ||
![]() |
734744ec4f | ||
![]() |
829c30bab4 | ||
![]() |
83cb04ed53 | ||
![]() |
0911d60290 | ||
![]() |
9bb7036857 | ||
![]() |
e0368f3ade | ||
![]() |
660217c3f9 | ||
![]() |
5f13b8cc64 | ||
![]() |
a2caabc79a | ||
![]() |
cb9710cda4 | ||
![]() |
56abebaece | ||
![]() |
0310db74aa | ||
![]() |
3e1bac69df | ||
![]() |
ec86af5a82 | ||
![]() |
79e111996b | ||
![]() |
f4c6f2680c | ||
![]() |
f3487be040 | ||
![]() |
87d0770974 | ||
![]() |
1c9fa656aa | ||
![]() |
59ff964f4f | ||
![]() |
10c4661885 | ||
![]() |
afe9c1294f | ||
![]() |
7a54972ed1 | ||
![]() |
b2cb0ef4dd | ||
![]() |
5edf641d6c | ||
![]() |
d9b14a86f0 | ||
![]() |
7b8fa49632 | ||
![]() |
b5540d5999 | ||
![]() |
494ff0c4c1 | ||
![]() |
1c4c8943a9 | ||
![]() |
5cd56fb834 | ||
![]() |
ed89d6b00e | ||
![]() |
19b28f4734 | ||
![]() |
4a8fdf6843 | ||
![]() |
d040b9dd4e | ||
![]() |
4036bf781a | ||
![]() |
d011e24f74 | ||
![]() |
0e9f78e9df | ||
![]() |
254097b2e2 | ||
![]() |
8049cef23b | ||
![]() |
de842dbf30 | ||
![]() |
8bcac58154 | ||
![]() |
511b78277f | ||
![]() |
dbd019943a | ||
![]() |
7d8a9d5636 | ||
![]() |
2ab6b10a03 | ||
![]() |
cd7789e8df | ||
![]() |
9287aa891f | ||
![]() |
a99203382c | ||
![]() |
6ec649df78 | ||
![]() |
e9f4e4c2cc | ||
![]() |
18cc6bb3ff | ||
![]() |
8094d8fe18 | ||
![]() |
b7a469d53c | ||
![]() |
f168c6c33d | ||
![]() |
981b62d9e9 | ||
![]() |
2c42d71a6a | ||
![]() |
ca6bc57c6d | ||
![]() |
0f0be17d0a | ||
![]() |
5e5a7922d0 | ||
![]() |
bb4cb39373 | ||
![]() |
a257f516fa | ||
![]() |
9d8284d85f | ||
![]() |
57efa4f628 | ||
![]() |
5b5f44085a | ||
![]() |
46ce2741bc | ||
![]() |
0b799fc882 | ||
![]() |
04b57a60b3 | ||
![]() |
02b4314046 | ||
![]() |
3d79fa76ab | ||
![]() |
ca1803ae93 | ||
![]() |
9046ec21d4 | ||
![]() |
b30654e3d5 | ||
![]() |
ee19e11e6d | ||
![]() |
a990c97959 | ||
![]() |
56816b13ba | ||
![]() |
b414aaa195 | ||
![]() |
1a1ae35ec9 | ||
![]() |
1029f393e4 | ||
![]() |
c1039dfe26 | ||
![]() |
cc96646e37 | ||
![]() |
d2ad7c81de | ||
![]() |
00e9c1131f | ||
![]() |
77cc9abe49 | ||
![]() |
91018bfe0f | ||
![]() |
7f9bcb283f | ||
![]() |
ae11c1c795 | ||
![]() |
9cd5fa596b | ||
![]() |
e10b10224a | ||
![]() |
e08886ae09 | ||
![]() |
df3ffcf7f9 | ||
![]() |
0c77f7be91 | ||
![]() |
cdc8e3144e | ||
![]() |
92847079d7 | ||
![]() |
b023c36941 | ||
![]() |
aee3b0b710 | ||
![]() |
d0c8afb601 | ||
![]() |
e9c14bbcea | ||
![]() |
2eca19daa3 | ||
![]() |
bcc7a75baa | ||
![]() |
19222e9341 | ||
![]() |
8124a61738 | ||
![]() |
09007146d0 | ||
![]() |
e715c64404 | ||
![]() |
53ff4510ac | ||
![]() |
495031001d | ||
![]() |
97e89384f1 | ||
![]() |
72913cda76 | ||
![]() |
03097aaa35 | ||
![]() |
f6d3f67b52 | ||
![]() |
62e0ac462e | ||
![]() |
54310b014b | ||
![]() |
beaf54f624 | ||
![]() |
114f1e0985 | ||
![]() |
1fdfe05c4c | ||
![]() |
9c1f91d40b | ||
![]() |
39711b332f | ||
![]() |
6cfda147b4 | ||
![]() |
eabdc610a0 | ||
![]() |
da74f5aa7e | ||
![]() |
14c575a7c4 | ||
![]() |
d8c8d28c13 | ||
![]() |
bf56f1f7e3 | ||
![]() |
8046d154ee | ||
![]() |
589117e204 | ||
![]() |
e9e5423f97 | ||
![]() |
c08f447cec | ||
![]() |
cf25175155 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,6 +3,7 @@ target/
|
||||
|
||||
## Ignore project files created by Eclipse
|
||||
.settings/
|
||||
/bin/
|
||||
.project
|
||||
.classpath
|
||||
|
||||
|
@@ -1,4 +1,5 @@
|
||||
language: java
|
||||
sudo: false
|
||||
|
||||
env:
|
||||
# Give Maven 1GB of memory to work with
|
||||
@@ -6,6 +7,8 @@ env:
|
||||
|
||||
# Install prerequisites for building Mirage2 more rapidly
|
||||
before_install:
|
||||
# Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
|
||||
- rm ~/.m2/settings.xml
|
||||
# Install latest Node.js 0.10.x & print version info
|
||||
- nvm install 0.10
|
||||
- node --version
|
||||
@@ -17,10 +20,10 @@ before_install:
|
||||
# Print ruby version info (should be installed)
|
||||
- ruby -v
|
||||
# Install Sass & print version info
|
||||
- gem install sass
|
||||
- gem install sass -v 3.3.14
|
||||
- sass -v
|
||||
# Install Compass & print version info
|
||||
- gem install compass
|
||||
- gem install compass -v 1.0.1
|
||||
- compass version
|
||||
|
||||
# Skip install stage, as we'll do it below
|
||||
|
2
LICENSE
2
LICENSE
@@ -1,7 +1,7 @@
|
||||
DSpace source code license:
|
||||
|
||||
|
||||
Copyright (c) 2002-2013, DuraSpace. All rights reserved.
|
||||
Copyright (c) 2002-2017, DuraSpace. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
|
@@ -266,22 +266,24 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
|
||||
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
|
||||
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
|
||||
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
|
||||
* DSpace JSP-UI (org.dspace:dspace-jspui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
|
||||
* DSpace OAI-PMH (org.dspace:dspace-oai:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
|
||||
* DSpace RDF (org.dspace:dspace-rdf:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
|
||||
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.0-rc4-SNAPSHOT - http://demo.dspace.org)
|
||||
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
|
||||
* Apache Solr Webapp (org.dspace:dspace-solr:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
|
||||
* DSpace SWORD (org.dspace:dspace-sword:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
|
||||
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
|
||||
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
|
||||
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
|
||||
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.4 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
|
||||
* DSpace JSP-UI (org.dspace:dspace-jspui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
|
||||
* DSpace OAI-PMH (org.dspace:dspace-oai:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
|
||||
* DSpace RDF (org.dspace:dspace-rdf:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
|
||||
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.3-SNAPSHOT - http://demo.dspace.org)
|
||||
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
|
||||
* Apache Solr Webapp (org.dspace:dspace-solr:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
|
||||
* DSpace SWORD (org.dspace:dspace-sword:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
|
||||
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
|
||||
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
|
||||
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.5 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
|
||||
* handle (org.dspace:handle:6.2 - no url defined)
|
||||
* jargon (org.dspace:jargon:1.4.25 - no url defined)
|
||||
* mets (org.dspace:mets:1.5.2 - no url defined)
|
||||
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
|
||||
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
|
||||
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
|
||||
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.3-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
|
||||
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
|
||||
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
|
||||
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
|
||||
@@ -386,8 +388,3 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined)
|
||||
* Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
|
||||
* XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)
|
||||
|
||||
Unknown license:
|
||||
|
||||
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
|
||||
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
|
||||
|
@@ -66,14 +66,12 @@ db.password=dspace
|
||||
#db.username=dspace
|
||||
#db.password=dspace
|
||||
|
||||
# Schema name - if your database contains multiple schemas, you can avoid problems with
|
||||
# retrieving the definitions of duplicate object names by specifying
|
||||
# the schema name here that is used for DSpace by uncommenting the following entry
|
||||
|
||||
# NOTE: this configuration option is for PostgreSQL only. For Oracle, schema is equivalent
|
||||
# to user name. DSpace depends on the PostgreSQL understanding of schema. If you are using
|
||||
# Oracle, just leave this this value blank.
|
||||
|
||||
# Schema name - if your database contains multiple schemas, you can avoid
|
||||
# problems with retrieving the definitions of duplicate object names by
|
||||
# specifying the schema name that is used for DSpace.
|
||||
# ORACLE USAGE NOTE: In Oracle, schema is equivalent to "username". This means
|
||||
# specifying a "db.schema" is often unnecessary (i.e. you can leave it blank),
|
||||
# UNLESS your Oracle DB Account (in db.username) has access to multiple schemas.
|
||||
db.schema =
|
||||
|
||||
# Maximum number of DB connections in pool
|
||||
|
@@ -12,7 +12,7 @@
|
||||
<parent>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>5.0</version>
|
||||
<version>5.9</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -209,7 +209,7 @@
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
|
||||
<!-- FileWeaver plugin is in charge of initializing & "weaving" together
|
||||
the dspace.cfg file to be used by the Unit Testing environment.
|
||||
It weaves two files, the default 'dspace.cfg' and 'dspace.cfg.more',
|
||||
@@ -528,9 +528,9 @@
|
||||
<version>1.8</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.maxmind.geoip</groupId>
|
||||
<artifactId>geoip-api</artifactId>
|
||||
<version>1.2.11</version>
|
||||
<groupId>com.maxmind.geoip2</groupId>
|
||||
<artifactId>geoip2</artifactId>
|
||||
<version>2.11.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ant</groupId>
|
||||
@@ -569,10 +569,10 @@
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>postgresql</groupId>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
@@ -590,7 +590,7 @@
|
||||
<artifactId>im4java</artifactId>
|
||||
<version>1.4.0</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- Flyway DB API (flywaydb.org) is used to manage DB upgrades automatically. -->
|
||||
<dependency>
|
||||
<groupId>org.flywaydb</groupId>
|
||||
@@ -603,13 +603,21 @@
|
||||
<groupId>com.google.apis</groupId>
|
||||
<artifactId>google-api-services-analytics</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.api-client</groupId>
|
||||
<artifactId>google-api-client</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.http-client</groupId>
|
||||
<artifactId>google-http-client</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.http-client</groupId>
|
||||
<artifactId>google-http-client-jackson2</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.oauth-client</groupId>
|
||||
<artifactId>google-oauth-client-jetty</artifactId>
|
||||
<artifactId>google-oauth-client</artifactId>
|
||||
</dependency>
|
||||
<!-- FindBugs -->
|
||||
<dependency>
|
||||
@@ -631,6 +639,66 @@
|
||||
<version>1</version>
|
||||
<type>jar</type>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.ws.commons.axiom</groupId>
|
||||
<artifactId>axiom-impl</artifactId>
|
||||
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
|
||||
<version>1.2.14</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.ws.commons.axiom</groupId>
|
||||
<artifactId>axiom-api</artifactId>
|
||||
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
|
||||
<version>1.2.14</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.core</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
<version>2.22.1</version>
|
||||
</dependency>
|
||||
<!-- S3 -->
|
||||
<dependency>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
<artifactId>aws-java-sdk-s3</artifactId>
|
||||
<version>1.10.50</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- S3 also wanted jackson... -->
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- For ORCID v2 integration -->
|
||||
<dependency>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>orcid-jaxb-api</artifactId>
|
||||
<version>2.1.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.json</groupId>
|
||||
<artifactId>json</artifactId>
|
||||
<version>20180130</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
</project>
|
||||
|
@@ -113,8 +113,8 @@ public class CommunityFiliator
|
||||
CommunityFiliator filiator = new CommunityFiliator();
|
||||
Context c = new Context();
|
||||
|
||||
// ve are superuser!
|
||||
c.setIgnoreAuthorization(true);
|
||||
// we are superuser!
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
try
|
||||
{
|
||||
|
@@ -228,7 +228,7 @@ public final class CreateAdministrator
|
||||
{
|
||||
// Of course we aren't an administrator yet so we need to
|
||||
// circumvent authorisation
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
// Find administrator group
|
||||
Group admins = Group.find(context, 1);
|
||||
|
@@ -88,7 +88,7 @@ public class MetadataExporter
|
||||
{
|
||||
// create a context
|
||||
Context context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
|
||||
xmlFormat.setLineWidth(120);
|
||||
|
@@ -187,7 +187,7 @@ public class DSpaceCSV implements Serializable
|
||||
StringBuilder lineBuilder = new StringBuilder();
|
||||
String lineRead;
|
||||
|
||||
while (StringUtils.isNotBlank(lineRead = input.readLine()))
|
||||
while ((lineRead = input.readLine()) != null)
|
||||
{
|
||||
if (lineBuilder.length() > 0) {
|
||||
// Already have a previously read value - add this line
|
||||
|
@@ -1169,10 +1169,8 @@ public class MetadataImport
|
||||
*/
|
||||
private static boolean isAuthorityControlledField(String md)
|
||||
{
|
||||
int pos = md.indexOf("[");
|
||||
String mdf = (pos > -1 ? md.substring(0, pos) : md);
|
||||
pos = md.indexOf(":");
|
||||
mdf = (pos > -1 ? md.substring(pos+1) : md);
|
||||
String mdf = StringUtils.substringAfter(md, ":");
|
||||
mdf = StringUtils.substringBefore(mdf, "[");
|
||||
return authorityControlled.contains(mdf);
|
||||
}
|
||||
|
||||
|
@@ -89,16 +89,16 @@ public class MetadataImportInvalidHeadingException extends Exception
|
||||
{
|
||||
if (type == SCHEMA)
|
||||
{
|
||||
return "Unknown metadata schema in row " + column + ": " + badHeading;
|
||||
return "Unknown metadata schema in column " + column + ": " + badHeading;
|
||||
} else if (type == ELEMENT)
|
||||
{
|
||||
return "Unknown metadata element in row " + column + ": " + badHeading;
|
||||
return "Unknown metadata element in column " + column + ": " + badHeading;
|
||||
} else if (type == MISSING)
|
||||
{
|
||||
return "Row with missing header: Row " + column;
|
||||
return "Row with missing header: column " + column;
|
||||
} else
|
||||
{
|
||||
return "Bad metadata declaration in row " + column + ": " + badHeading;
|
||||
return "Bad metadata declaration in column" + column + ": " + badHeading;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -209,7 +209,7 @@ public class ItemExport
|
||||
}
|
||||
|
||||
Context c = new Context();
|
||||
c.setIgnoreAuthorization(true);
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
if (myType == Constants.ITEM)
|
||||
{
|
||||
|
@@ -284,7 +284,7 @@ public class ItemImport
|
||||
if (line.hasOption('z'))
|
||||
{
|
||||
zip = true;
|
||||
zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
|
||||
zipfilename = line.getOptionValue('z');
|
||||
}
|
||||
|
||||
//By default assume collections will be given on the command line
|
||||
@@ -733,7 +733,7 @@ public class ItemImport
|
||||
{
|
||||
clist = mycollections;
|
||||
}
|
||||
addItem(c, mycollections, sourceDir, dircontents[i], mapOut, template);
|
||||
addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
|
||||
System.out.println(i + " " + dircontents[i]);
|
||||
c.clearCache();
|
||||
}
|
||||
@@ -1108,6 +1108,10 @@ public class ItemImport
|
||||
{
|
||||
value = "";
|
||||
}
|
||||
else
|
||||
{
|
||||
value = value.trim();
|
||||
}
|
||||
// //getElementData(n, "element");
|
||||
String element = getAttributeValue(n, "element");
|
||||
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
|
||||
@@ -1129,8 +1133,8 @@ public class ItemImport
|
||||
{
|
||||
qualifier = null;
|
||||
}
|
||||
|
||||
if (!isTest)
|
||||
// only add metadata if it is no test and there is an real value
|
||||
if (!isTest && !value.equals(""))
|
||||
{
|
||||
i.addMetadata(schema, element, qualifier, language, value);
|
||||
}
|
||||
@@ -2148,7 +2152,7 @@ public class ItemImport
|
||||
context = new Context();
|
||||
eperson = EPerson.find(context, oldEPerson.getID());
|
||||
context.setCurrentUser(eperson);
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
boolean isResume = theResumeDir!=null;
|
||||
|
||||
|
@@ -351,7 +351,7 @@ public class ItemUpdate {
|
||||
|
||||
context = new Context();
|
||||
iu.setEPerson(context, iu.eperson);
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
|
||||
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
|
||||
@@ -362,19 +362,20 @@ public class ItemUpdate {
|
||||
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
|
||||
|
||||
context.complete(); // complete all transactions
|
||||
context.setIgnoreAuthorization(false);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
if (context != null && context.isValid())
|
||||
{
|
||||
context.abort();
|
||||
context.setIgnoreAuthorization(false);
|
||||
}
|
||||
e.printStackTrace();
|
||||
pr(e.toString());
|
||||
status = 1;
|
||||
}
|
||||
finally {
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
if (isTest)
|
||||
{
|
||||
|
@@ -11,6 +11,7 @@ import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.List;
|
||||
import java.util.TreeMap;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.servicemanager.DSpaceKernelImpl;
|
||||
import org.dspace.servicemanager.DSpaceKernelInit;
|
||||
@@ -275,9 +276,21 @@ public class ScriptLauncher
|
||||
*/
|
||||
private static void display()
|
||||
{
|
||||
// List all command elements
|
||||
List<Element> commands = commandConfigs.getRootElement().getChildren("command");
|
||||
System.out.println("Usage: dspace [command-name] {parameters}");
|
||||
|
||||
// Sort the commands by name.
|
||||
// We cannot just use commands.sort() because it tries to remove and
|
||||
// reinsert Elements within other Elements, and that doesn't work.
|
||||
TreeMap<String, Element> sortedCommands = new TreeMap<>();
|
||||
for (Element command : commands)
|
||||
{
|
||||
sortedCommands.put(command.getChild("name").getValue(), command);
|
||||
}
|
||||
|
||||
// Display the sorted list
|
||||
System.out.println("Usage: dspace [command-name] {parameters}");
|
||||
for (Element command : sortedCommands.values())
|
||||
{
|
||||
System.out.println(" - " + command.getChild("name").getValue() +
|
||||
": " + command.getChild("description").getValue());
|
||||
|
@@ -7,9 +7,10 @@
|
||||
*/
|
||||
package org.dspace.app.mediafilter;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.InputStream;
|
||||
import java.nio.file.Files;
|
||||
|
||||
|
||||
/**
|
||||
@@ -30,9 +31,24 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
|
||||
throws Exception
|
||||
{
|
||||
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
|
||||
File f2 = getThumbnailFile(f);
|
||||
return new FileInputStream(f2);
|
||||
}
|
||||
File f2 = null;
|
||||
try
|
||||
{
|
||||
f2 = getThumbnailFile(f);
|
||||
byte[] bytes = Files.readAllBytes(f2.toPath());
|
||||
return new ByteArrayInputStream(bytes);
|
||||
}
|
||||
finally
|
||||
{
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
f.delete();
|
||||
if (f2 != null)
|
||||
{
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
f2.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@@ -7,24 +7,40 @@
|
||||
*/
|
||||
package org.dspace.app.mediafilter;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.InputStream;
|
||||
import java.nio.file.Files;
|
||||
|
||||
public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
|
||||
public InputStream getDestinationStream(InputStream source)
|
||||
throws Exception
|
||||
{
|
||||
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
|
||||
File f2 = getImageFile(f, 0);
|
||||
File f3 = getThumbnailFile(f2);
|
||||
return new FileInputStream(f3);
|
||||
File f2 = null;
|
||||
File f3 = null;
|
||||
try
|
||||
{
|
||||
f2 = getImageFile(f, 0);
|
||||
f3 = getThumbnailFile(f2);
|
||||
byte[] bytes = Files.readAllBytes(f3.toPath());
|
||||
return new ByteArrayInputStream(bytes);
|
||||
}
|
||||
finally
|
||||
{
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
f.delete();
|
||||
if (f2 != null)
|
||||
{
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
f2.delete();
|
||||
}
|
||||
if (f3 != null)
|
||||
{
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
f3.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static final String[] PDF = {"Adobe PDF"};
|
||||
public String[] getInputMIMETypes()
|
||||
{
|
||||
return PDF;
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -23,6 +23,7 @@ import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.im4java.core.ConvertCmd;
|
||||
import org.im4java.core.Info;
|
||||
import org.im4java.core.IM4JavaException;
|
||||
import org.im4java.core.IMOperation;
|
||||
import org.im4java.process.ProcessStarter;
|
||||
@@ -34,161 +35,163 @@ import org.dspace.core.ConfigurationManager;
|
||||
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
|
||||
* no bigger than. Creates only JPEGs.
|
||||
*/
|
||||
public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
|
||||
{
|
||||
private static int width = 180;
|
||||
private static int height = 120;
|
||||
static String bitstreamDescription = "IM Thumbnail";
|
||||
static final String defaultPattern = "Generated Thumbnail";
|
||||
static Pattern replaceRegex = Pattern.compile(defaultPattern);
|
||||
|
||||
static {
|
||||
String pre = ImageMagickThumbnailFilter.class.getName();
|
||||
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
|
||||
ProcessStarter.setGlobalSearchPath(s);
|
||||
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
|
||||
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
|
||||
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
|
||||
if (description != null) {
|
||||
bitstreamDescription = description;
|
||||
}
|
||||
try {
|
||||
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
|
||||
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
|
||||
} catch(PatternSyntaxException e) {
|
||||
System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public ImageMagickThumbnailFilter() {
|
||||
}
|
||||
|
||||
|
||||
public String getFilteredName(String oldFilename)
|
||||
{
|
||||
return oldFilename + ".jpg";
|
||||
}
|
||||
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
private static int width = 180;
|
||||
private static int height = 120;
|
||||
private static boolean flatten = true;
|
||||
static String bitstreamDescription = "IM Thumbnail";
|
||||
static final String defaultPattern = "Generated Thumbnail";
|
||||
static Pattern replaceRegex = Pattern.compile(defaultPattern);
|
||||
static String cmyk_profile;
|
||||
static String srgb_profile;
|
||||
|
||||
/**
|
||||
* @return String bundle name
|
||||
*
|
||||
*/
|
||||
public String getBundleName()
|
||||
{
|
||||
return "THUMBNAIL";
|
||||
}
|
||||
static {
|
||||
String pre = ImageMagickThumbnailFilter.class.getName();
|
||||
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
|
||||
ProcessStarter.setGlobalSearchPath(s);
|
||||
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
|
||||
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
|
||||
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
|
||||
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
|
||||
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
|
||||
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
|
||||
if (description != null) {
|
||||
bitstreamDescription = description;
|
||||
}
|
||||
try {
|
||||
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
|
||||
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
|
||||
} catch (PatternSyntaxException e) {
|
||||
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamformat
|
||||
*/
|
||||
public String getFormatString()
|
||||
{
|
||||
return "JPEG";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamDescription
|
||||
*/
|
||||
public String getDescription()
|
||||
{
|
||||
return bitstreamDescription;
|
||||
}
|
||||
|
||||
public static File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
|
||||
File f = File.createTempFile(prefix, suffix);
|
||||
f.deleteOnExit();
|
||||
FileOutputStream fos = new FileOutputStream(f);
|
||||
|
||||
byte[] buffer = new byte[1024];
|
||||
int len = source.read(buffer);
|
||||
while (len != -1) {
|
||||
fos.write(buffer, 0, len);
|
||||
len = source.read(buffer);
|
||||
}
|
||||
fos.close();
|
||||
return f;
|
||||
}
|
||||
|
||||
public static File getThumbnailFile(File f) throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
op.addImage(f.getAbsolutePath());
|
||||
op.thumbnail(width, height);
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println("IM Thumbnail Param: "+op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
public static File getImageFile(File f, int page) throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
String s = "[" + page + "]";
|
||||
op.addImage(f.getAbsolutePath()+s);
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println("IM Image Param: "+op);
|
||||
|
||||
public ImageMagickThumbnailFilter() {
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
|
||||
throws Exception
|
||||
{
|
||||
String nsrc = source.getName();
|
||||
for(Bundle b: item.getBundles("THUMBNAIL")) {
|
||||
for(Bitstream bit: b.getBitstreams()) {
|
||||
String n = bit.getName();
|
||||
if (n != null) {
|
||||
if (nsrc != null) {
|
||||
if (!n.startsWith(nsrc)) continue;
|
||||
}
|
||||
}
|
||||
String description = bit.getDescription();
|
||||
//If anything other than a generated thumbnail is found, halt processing
|
||||
if (description != null) {
|
||||
if (replaceRegex.matcher(description).matches()) {
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (description.equals(bitstreamDescription)) {
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
System.out.println("Custom Thumbnail exists for " + nsrc + " for item " + item.getHandle() + ". Thumbnail will not be generated. ");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return true; //assume that the thumbnail is a custom one
|
||||
}
|
||||
|
||||
public String[] getInputMIMETypes()
|
||||
{
|
||||
return ImageIO.getReaderMIMETypes();
|
||||
}
|
||||
public String getFilteredName(String oldFilename) {
|
||||
return oldFilename + ".jpg";
|
||||
}
|
||||
|
||||
public String[] getInputDescriptions()
|
||||
{
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* @return String bundle name
|
||||
*
|
||||
*/
|
||||
public String getBundleName() {
|
||||
return "THUMBNAIL";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamformat
|
||||
*/
|
||||
public String getFormatString() {
|
||||
return "JPEG";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamDescription
|
||||
*/
|
||||
public String getDescription() {
|
||||
return bitstreamDescription;
|
||||
}
|
||||
|
||||
public static File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
|
||||
File f = File.createTempFile(prefix, suffix);
|
||||
f.deleteOnExit();
|
||||
FileOutputStream fos = new FileOutputStream(f);
|
||||
|
||||
byte[] buffer = new byte[1024];
|
||||
int len = source.read(buffer);
|
||||
while (len != -1) {
|
||||
fos.write(buffer, 0, len);
|
||||
len = source.read(buffer);
|
||||
}
|
||||
fos.close();
|
||||
return f;
|
||||
}
|
||||
|
||||
public static File getThumbnailFile(File f) throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
op.autoOrient();
|
||||
op.addImage(f.getAbsolutePath());
|
||||
op.thumbnail(width, height);
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println("IM Thumbnail Param: " + op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
public static File getImageFile(File f, int page) throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
String s = "[" + page + "]";
|
||||
op.addImage(f.getAbsolutePath() + s);
|
||||
if (flatten) {
|
||||
op.flatten();
|
||||
}
|
||||
// PDFs using the CMYK color system can be handled specially if
|
||||
// profiles are defined
|
||||
if (cmyk_profile != null && srgb_profile != null) {
|
||||
Info imageInfo = new Info(f.getAbsolutePath(), true);
|
||||
String imageClass = imageInfo.getImageClass();
|
||||
if (imageClass.contains("CMYK")) {
|
||||
op.profile(cmyk_profile);
|
||||
op.profile(srgb_profile);
|
||||
}
|
||||
}
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println("IM Image Param: " + op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
public boolean preProcessBitstream(Context c, Item item, Bitstream source) throws Exception {
|
||||
String nsrc = source.getName();
|
||||
for (Bundle b : item.getBundles("THUMBNAIL")) {
|
||||
for (Bitstream bit : b.getBitstreams()) {
|
||||
String n = bit.getName();
|
||||
if (n != null) {
|
||||
if (nsrc != null) {
|
||||
if (!n.startsWith(nsrc))
|
||||
continue;
|
||||
}
|
||||
}
|
||||
String description = bit.getDescription();
|
||||
// If anything other than a generated thumbnail
|
||||
// is found, halt processing
|
||||
if (description != null) {
|
||||
if (replaceRegex.matcher(description).matches()) {
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println(description + " " + nsrc
|
||||
+ " matches pattern and is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (description.equals(bitstreamDescription)) {
|
||||
if (MediaFilterManager.isVerbose) {
|
||||
System.out.println(bitstreamDescription + " " + nsrc
|
||||
+ " is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
|
||||
+ item.getHandle() + ". Thumbnail will not be generated. ");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true; // assume that the thumbnail is a custom one
|
||||
}
|
||||
|
||||
public String[] getInputExtensions()
|
||||
{
|
||||
return ImageIO.getReaderFileSuffixes();
|
||||
}
|
||||
}
|
||||
|
@@ -33,7 +33,7 @@ import java.util.zip.GZIPOutputStream;
|
||||
* }
|
||||
* g.finish();
|
||||
* </pre>
|
||||
*
|
||||
*
|
||||
* @author Robert Tansley
|
||||
*/
|
||||
public abstract class AbstractGenerator
|
||||
@@ -59,7 +59,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Initialize this generator to write to the given directory. This must be
|
||||
* called by any subclass constructor.
|
||||
*
|
||||
*
|
||||
* @param outputDirIn
|
||||
* directory to write sitemap files to
|
||||
*/
|
||||
@@ -73,7 +73,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Start writing a new sitemap file.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs creating the file
|
||||
*/
|
||||
@@ -97,7 +97,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Add the given URL to the sitemap.
|
||||
*
|
||||
*
|
||||
* @param url
|
||||
* Full URL to add
|
||||
* @param lastMod
|
||||
@@ -129,7 +129,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Finish with the current sitemap file.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs writing
|
||||
*/
|
||||
@@ -144,15 +144,18 @@ public abstract class AbstractGenerator
|
||||
* Complete writing sitemap files and write the index files. This is invoked
|
||||
* when all calls to {@link AbstractGenerator#addURL(String, Date)} have
|
||||
* been completed, and invalidates the generator.
|
||||
*
|
||||
*
|
||||
* @return number of sitemap files written.
|
||||
*
|
||||
*
|
||||
* @throws IOException
|
||||
* if an error occurs writing
|
||||
*/
|
||||
public int finish() throws IOException
|
||||
{
|
||||
closeCurrentFile();
|
||||
if (null != currentOutput)
|
||||
{
|
||||
closeCurrentFile();
|
||||
}
|
||||
|
||||
OutputStream fo = new FileOutputStream(new File(outputDir,
|
||||
getIndexFilename()));
|
||||
@@ -165,13 +168,13 @@ public abstract class AbstractGenerator
|
||||
PrintStream out = new PrintStream(fo);
|
||||
writeIndex(out, fileCount);
|
||||
out.close();
|
||||
|
||||
|
||||
return fileCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return marked-up text to be included in a sitemap about a given URL.
|
||||
*
|
||||
*
|
||||
* @param url
|
||||
* URL to add information about
|
||||
* @param lastMod
|
||||
@@ -183,14 +186,14 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Return the boilerplate at the top of a sitemap file.
|
||||
*
|
||||
*
|
||||
* @return The boilerplate markup.
|
||||
*/
|
||||
public abstract String getLeadingBoilerPlate();
|
||||
|
||||
/**
|
||||
* Return the boilerplate at the end of a sitemap file.
|
||||
*
|
||||
*
|
||||
* @return The boilerplate markup.
|
||||
*/
|
||||
public abstract String getTrailingBoilerPlate();
|
||||
@@ -198,7 +201,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return the maximum size in bytes that an individual sitemap file should
|
||||
* be.
|
||||
*
|
||||
*
|
||||
* @return the size in bytes.
|
||||
*/
|
||||
public abstract int getMaxSize();
|
||||
@@ -206,7 +209,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return the maximum number of URLs that an individual sitemap file should
|
||||
* contain.
|
||||
*
|
||||
*
|
||||
* @return the maximum number of URLs.
|
||||
*/
|
||||
public abstract int getMaxURLs();
|
||||
@@ -214,7 +217,7 @@ public abstract class AbstractGenerator
|
||||
/**
|
||||
* Return whether the written sitemap files and index should be
|
||||
* GZIP-compressed.
|
||||
*
|
||||
*
|
||||
* @return {@code true} if GZIP compression should be used, {@code false}
|
||||
* otherwise.
|
||||
*/
|
||||
@@ -222,7 +225,7 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Return the filename a sitemap at the given index should be stored at.
|
||||
*
|
||||
*
|
||||
* @param number
|
||||
* index of the sitemap file (zero is first).
|
||||
* @return the filename to write the sitemap to.
|
||||
@@ -231,14 +234,14 @@ public abstract class AbstractGenerator
|
||||
|
||||
/**
|
||||
* Get the filename the index should be written to.
|
||||
*
|
||||
*
|
||||
* @return the filename of the index.
|
||||
*/
|
||||
public abstract String getIndexFilename();
|
||||
|
||||
/**
|
||||
* Write the index file.
|
||||
*
|
||||
*
|
||||
* @param output
|
||||
* stream to write the index to
|
||||
* @param sitemapCount
|
||||
|
@@ -92,7 +92,7 @@ public class CreateStatReport {
|
||||
|
||||
// create context as super user
|
||||
context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
//get paths to directories
|
||||
outputLogDirectory = ConfigurationManager.getProperty("log.dir") + File.separator;
|
||||
|
@@ -215,7 +215,7 @@ public class LogAnalyser
|
||||
|
||||
// create context as super user
|
||||
Context context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
// set up our command line variables
|
||||
String myLogDir = null;
|
||||
|
@@ -151,7 +151,7 @@ public class ReportGenerator
|
||||
{
|
||||
// create context as super user
|
||||
Context context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
String myFormat = null;
|
||||
String myInput = null;
|
||||
|
@@ -7,37 +7,31 @@
|
||||
*/
|
||||
package org.dspace.app.util;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.content.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.Metadatum;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.handle.HandleManager;
|
||||
|
||||
import org.jdom.Element;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
/**
|
||||
* Configuration and mapping for Google Scholar output metadata
|
||||
* @author Sands Fish
|
||||
@@ -125,6 +119,7 @@ public class GoogleMetadata
|
||||
|
||||
private static final int ALL_FIELDS_IN_OPTION = 2;
|
||||
|
||||
private Context ourContext;
|
||||
// Load configured fields from google-metadata.properties
|
||||
static
|
||||
{
|
||||
@@ -216,7 +211,11 @@ public class GoogleMetadata
|
||||
// Hold onto the item in case we need to refresh a stale parse
|
||||
this.item = item;
|
||||
itemURL = HandleManager.resolveToURL(context, item.getHandle());
|
||||
ourContext=context;
|
||||
EPerson currentUser = ourContext.getCurrentUser();
|
||||
ourContext.setCurrentUser(null);
|
||||
parseItem();
|
||||
ourContext.setCurrentUser(currentUser);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -757,16 +756,17 @@ public class GoogleMetadata
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch all metadata mappings
|
||||
*
|
||||
* Fetch retaining the order of the values for any given key in which they
|
||||
* where added (like authors).
|
||||
*
|
||||
* Usage: GoogleMetadata gmd = new GoogleMetadata(item); for(Entry<String,
|
||||
* String> mapping : googlemd.getMappings()) { ... }
|
||||
*
|
||||
* @return Iterable of metadata fields mapped to Google-formatted values
|
||||
*/
|
||||
public Set<Entry<String, String>> getMappings()
|
||||
public Collection<Entry<String, String>> getMappings()
|
||||
{
|
||||
return new HashSet<>(metadataMappings.entries());
|
||||
return metadataMappings.entries();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1041,7 +1041,6 @@ public class GoogleMetadata
|
||||
*/
|
||||
private Bitstream findLinkableFulltext(Item item) throws SQLException {
|
||||
Bitstream bestSoFar = null;
|
||||
int bitstreamCount = 0;
|
||||
Bundle[] contentBundles = item.getBundles("ORIGINAL");
|
||||
for (Bundle bundle : contentBundles) {
|
||||
int primaryBitstreamId = bundle.getPrimaryBitstreamID();
|
||||
@@ -1050,16 +1049,16 @@ public class GoogleMetadata
|
||||
if (candidate.getID() == primaryBitstreamId) { // is primary -> use this one
|
||||
if (isPublic(candidate)) {
|
||||
return candidate;
|
||||
}
|
||||
} else
|
||||
{
|
||||
|
||||
if (bestSoFar == null && isPublic(candidate)) { //if bestSoFar is null but the candidate is not public you don't use it and try to find another
|
||||
bestSoFar = candidate;
|
||||
}
|
||||
}
|
||||
} else if (bestSoFar == null) {
|
||||
bestSoFar = candidate;
|
||||
}
|
||||
bitstreamCount++;
|
||||
}
|
||||
}
|
||||
if (bitstreamCount > 1 || !isPublic(bestSoFar)) {
|
||||
bestSoFar = null;
|
||||
}
|
||||
|
||||
return bestSoFar;
|
||||
}
|
||||
@@ -1069,16 +1068,10 @@ public class GoogleMetadata
|
||||
return false;
|
||||
}
|
||||
boolean result = false;
|
||||
Context context = null;
|
||||
try {
|
||||
context = new Context();
|
||||
result = AuthorizeManager.authorizeActionBoolean(context, bitstream, Constants.READ, true);
|
||||
result = AuthorizeManager.authorizeActionBoolean(ourContext, bitstream, Constants.READ, true);
|
||||
} catch (SQLException e) {
|
||||
log.error("Cannot determine whether bitstream is public, assuming it isn't. bitstream_id=" + bitstream.getID(), e);
|
||||
} finally {
|
||||
if (context != null) {
|
||||
context.abort();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@@ -279,11 +279,11 @@ public class LDAPAuthentication
|
||||
{
|
||||
log.info(LogManager.getHeader(context,
|
||||
"type=ldap-login", "type=ldap_but_already_email"));
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
eperson.setNetid(netid.toLowerCase());
|
||||
eperson.update();
|
||||
context.commit();
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
context.setCurrentUser(eperson);
|
||||
|
||||
// assign user to groups based on ldap dn
|
||||
@@ -298,7 +298,7 @@ public class LDAPAuthentication
|
||||
// TEMPORARILY turn off authorisation
|
||||
try
|
||||
{
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
eperson = EPerson.create(context);
|
||||
if (StringUtils.isNotEmpty(email))
|
||||
{
|
||||
@@ -332,7 +332,7 @@ public class LDAPAuthentication
|
||||
}
|
||||
finally
|
||||
{
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
log.info(LogManager.getHeader(context, "authenticate",
|
||||
@@ -354,7 +354,7 @@ public class LDAPAuthentication
|
||||
}
|
||||
finally
|
||||
{
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -7,11 +7,7 @@
|
||||
*/
|
||||
package org.dspace.authenticate;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
@@ -20,6 +16,10 @@ import org.dspace.core.LogManager;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.sql.SQLException;
|
||||
|
||||
/**
|
||||
* A stackable authentication method
|
||||
* based on the DSpace internal "EPerson" database.
|
||||
@@ -128,7 +128,7 @@ public class PasswordAuthentication
|
||||
// ensures they are password users
|
||||
try
|
||||
{
|
||||
if (context.getCurrentUser().getPasswordHash() != null && !context.getCurrentUser().getPasswordHash().toString().equals(""))
|
||||
if (context.getCurrentUser() != null && context.getCurrentUser().getPasswordHash()!=null && StringUtils.isNotBlank(context.getCurrentUser().getPasswordHash().toString()))
|
||||
{
|
||||
String groupName = ConfigurationManager.getProperty("authentication-password", "login.specialgroup");
|
||||
if ((groupName != null) && (!groupName.trim().equals("")))
|
||||
@@ -149,7 +149,7 @@ public class PasswordAuthentication
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
// The user is not a password user, so we don't need to worry about them
|
||||
log.error(LogManager.getHeader(context,"getSpecialGroups",""),e);
|
||||
}
|
||||
return new int[0];
|
||||
}
|
||||
|
@@ -612,7 +612,7 @@ public class X509Authentication implements AuthenticationMethod
|
||||
"from=x.509, email=" + email));
|
||||
|
||||
// TEMPORARILY turn off authorisation
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
eperson = EPerson.create(context);
|
||||
eperson.setEmail(email);
|
||||
eperson.setCanLogIn(true);
|
||||
@@ -620,7 +620,7 @@ public class X509Authentication implements AuthenticationMethod
|
||||
eperson);
|
||||
eperson.update();
|
||||
context.commit();
|
||||
context.setIgnoreAuthorization(false);
|
||||
context.restoreAuthSystemState();
|
||||
context.setCurrentUser(eperson);
|
||||
setSpecialGroupsFlag(request, email);
|
||||
return SUCCESS;
|
||||
|
@@ -0,0 +1,20 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* @author Jonas Van Goolen (jonas at atmire dot com)
|
||||
*/
|
||||
public interface SolrAuthorityInterface {
|
||||
|
||||
List<AuthorityValue> queryAuthorities(String text, int max);
|
||||
|
||||
AuthorityValue queryAuthorityID(String id);
|
||||
}
|
@@ -1,86 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.orcid.model.Bio;
|
||||
import org.dspace.authority.orcid.model.Work;
|
||||
import org.dspace.authority.orcid.xml.XMLtoBio;
|
||||
import org.dspace.authority.orcid.xml.XMLtoWork;
|
||||
import org.dspace.authority.rest.RestSource;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.w3c.dom.Document;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Orcid extends RestSource {
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(Orcid.class);
|
||||
|
||||
private static Orcid orcid;
|
||||
|
||||
public static Orcid getOrcid() {
|
||||
if (orcid == null) {
|
||||
orcid = new DSpace().getServiceManager().getServiceByName("OrcidSource", Orcid.class);
|
||||
}
|
||||
return orcid;
|
||||
}
|
||||
|
||||
private Orcid(String url) {
|
||||
super(url);
|
||||
}
|
||||
|
||||
public Bio getBio(String id) {
|
||||
Document bioDocument = restConnector.get(id + "/orcid-bio");
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
Bio bio = converter.convert(bioDocument).get(0);
|
||||
bio.setOrcid(id);
|
||||
return bio;
|
||||
}
|
||||
|
||||
public List<Work> getWorks(String id) {
|
||||
Document document = restConnector.get(id + "/orcid-works");
|
||||
XMLtoWork converter = new XMLtoWork();
|
||||
return converter.convert(document);
|
||||
}
|
||||
|
||||
public List<Bio> queryBio(String name, int start, int rows) {
|
||||
Document bioDocument = restConnector.get("search/orcid-bio?q=" + URLEncoder.encode("\"" + name + "\"") + "&start=" + start + "&rows=" + rows);
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
return converter.convert(bioDocument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AuthorityValue> queryAuthorities(String text, int max) {
|
||||
List<Bio> bios = queryBio(text, 0, max);
|
||||
List<AuthorityValue> authorities = new ArrayList<AuthorityValue>();
|
||||
for (Bio bio : bios) {
|
||||
authorities.add(OrcidAuthorityValue.create(bio));
|
||||
}
|
||||
return authorities;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AuthorityValue queryAuthorityID(String id) {
|
||||
Bio bio = getBio(id);
|
||||
return OrcidAuthorityValue.create(bio);
|
||||
}
|
||||
}
|
@@ -1,316 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.AuthorityValueGenerator;
|
||||
import org.dspace.authority.PersonAuthorityValue;
|
||||
import org.dspace.authority.orcid.model.Bio;
|
||||
import org.dspace.authority.orcid.model.BioExternalIdentifier;
|
||||
import org.dspace.authority.orcid.model.BioName;
|
||||
import org.dspace.authority.orcid.model.BioResearcherUrl;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class OrcidAuthorityValue extends PersonAuthorityValue {
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(OrcidAuthorityValue.class);
|
||||
|
||||
private String orcid_id;
|
||||
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
|
||||
private boolean update; // used in setValues(Bio bio)
|
||||
|
||||
|
||||
/**
|
||||
* Creates an instance of OrcidAuthorityValue with only uninitialized fields.
|
||||
* This is meant to be filled in with values from an existing record.
|
||||
* To create a brand new OrcidAuthorityValue, use create()
|
||||
*/
|
||||
public OrcidAuthorityValue() {
|
||||
}
|
||||
|
||||
public OrcidAuthorityValue(SolrDocument document) {
|
||||
super(document);
|
||||
}
|
||||
|
||||
public String getOrcid_id() {
|
||||
return orcid_id;
|
||||
}
|
||||
|
||||
public void setOrcid_id(String orcid_id) {
|
||||
this.orcid_id = orcid_id;
|
||||
}
|
||||
|
||||
public Map<String, List<String>> getOtherMetadata() {
|
||||
return otherMetadata;
|
||||
}
|
||||
|
||||
public void addOtherMetadata(String label, String data) {
|
||||
List<String> strings = otherMetadata.get(label);
|
||||
if (strings == null) {
|
||||
strings = new ArrayList<String>();
|
||||
}
|
||||
strings.add(data);
|
||||
otherMetadata.put(label, strings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SolrInputDocument getSolrInputDocument() {
|
||||
SolrInputDocument doc = super.getSolrInputDocument();
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
doc.addField("orcid_id", getOrcid_id());
|
||||
}
|
||||
|
||||
for (String t : otherMetadata.keySet()) {
|
||||
List<String> data = otherMetadata.get(t);
|
||||
for (String data_entry : data) {
|
||||
doc.addField("label_" + t, data_entry);
|
||||
}
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setValues(SolrDocument document) {
|
||||
super.setValues(document);
|
||||
this.orcid_id = String.valueOf(document.getFieldValue("orcid_id"));
|
||||
|
||||
otherMetadata = new HashMap<String, List<String>>();
|
||||
for (String fieldName : document.getFieldNames()) {
|
||||
String labelPrefix = "label_";
|
||||
if (fieldName.startsWith(labelPrefix)) {
|
||||
String label = fieldName.substring(labelPrefix.length());
|
||||
List<String> list = new ArrayList<String>();
|
||||
Collection<Object> fieldValues = document.getFieldValues(fieldName);
|
||||
for (Object o : fieldValues) {
|
||||
list.add(String.valueOf(o));
|
||||
}
|
||||
otherMetadata.put(label, list);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create a brand new, empty authority value with a fresh UUID and
 * creation/lastModified timestamps.
 *
 * @return the new OrcidAuthorityValue
 */
public static OrcidAuthorityValue create() {
    OrcidAuthorityValue orcidAuthorityValue = new OrcidAuthorityValue();
    orcidAuthorityValue.setId(UUID.randomUUID().toString());
    orcidAuthorityValue.updateLastModifiedDate();
    orcidAuthorityValue.setCreationDate(new Date());
    return orcidAuthorityValue;
}
|
||||
|
||||
/**
 * Create an authority based on a given orcid bio.
 *
 * @param bio parsed ORCID biography record
 * @return a new value populated via setValues(Bio)
 */
public static OrcidAuthorityValue create(Bio bio) {
    OrcidAuthorityValue authority = OrcidAuthorityValue.create();

    authority.setValues(bio);

    return authority;
}
|
||||
|
||||
/**
 * Merge the values from an ORCID Bio into this authority value.
 * Each differing field raises the inherited "update" dirty flag (via
 * updateValue/updateOtherMetadata side effects); at the end the flag
 * decides whether update() refreshes lastModified, is returned, and is
 * then reset for the next call.
 *
 * @param bio parsed ORCID biography record
 * @return true when at least one field actually changed
 */
public boolean setValues(Bio bio) {
    BioName name = bio.getName();

    if (updateValue(bio.getOrcid(), getOrcid_id())) {
        setOrcid_id(bio.getOrcid());
    }

    if (updateValue(name.getFamilyName(), getLastName())) {
        setLastName(name.getFamilyName());
    }

    if (updateValue(name.getGivenNames(), getFirstName())) {
        setFirstName(name.getGivenNames());
    }

    // Credit name and other names become name variants (no duplicates).
    if (StringUtils.isNotBlank(name.getCreditName())) {
        if (!getNameVariants().contains(name.getCreditName())) {
            addNameVariant(name.getCreditName());
            update = true;
        }
    }
    for (String otherName : name.getOtherNames()) {
        if (!getNameVariants().contains(otherName)) {
            addNameVariant(otherName);
            update = true;
        }
    }

    if (updateOtherMetadata("country", bio.getCountry())) {
        addOtherMetadata("country", bio.getCountry());
    }

    for (String keyword : bio.getKeywords()) {
        if (updateOtherMetadata("keyword", keyword)) {
            addOtherMetadata("keyword", keyword);
        }
    }

    for (BioExternalIdentifier externalIdentifier : bio.getBioExternalIdentifiers()) {
        if (updateOtherMetadata("external_identifier", externalIdentifier.toString())) {
            addOtherMetadata("external_identifier", externalIdentifier.toString());
        }
    }

    for (BioResearcherUrl researcherUrl : bio.getResearcherUrls()) {
        if (updateOtherMetadata("researcher_url", researcherUrl.toString())) {
            addOtherMetadata("researcher_url", researcherUrl.toString());
        }
    }

    if (updateOtherMetadata("biography", bio.getBiography())) {
        addOtherMetadata("biography", bio.getBiography());
    }

    // Recompute the display value from the (possibly changed) name parts.
    setValue(getName());

    if (update) {
        update();
    }
    // Report and reset the dirty flag so the next call starts clean.
    boolean result = update;
    update = false;
    return result;
}
|
||||
|
||||
private boolean updateOtherMetadata(String label, String data) {
|
||||
List<String> strings = getOtherMetadata().get(label);
|
||||
boolean update;
|
||||
if (strings == null) {
|
||||
update = StringUtils.isNotBlank(data);
|
||||
} else {
|
||||
update = !strings.contains(data);
|
||||
}
|
||||
if (update) {
|
||||
this.update = true;
|
||||
}
|
||||
return update;
|
||||
}
|
||||
|
||||
private boolean updateValue(String incoming, String resident) {
|
||||
boolean update = StringUtils.isNotBlank(incoming) && !incoming.equals(resident);
|
||||
if (update) {
|
||||
this.update = true;
|
||||
}
|
||||
return update;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> choiceSelectMap() {
|
||||
|
||||
Map<String, String> map = super.choiceSelectMap();
|
||||
|
||||
map.put("orcid", getOrcid_id());
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
public String getAuthorityType() {
|
||||
return "orcid";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String generateString() {
|
||||
String generateString = AuthorityValueGenerator.GENERATE + getAuthorityType() + AuthorityValueGenerator.SPLIT;
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
generateString += getOrcid_id();
|
||||
}
|
||||
return generateString;
|
||||
}
|
||||
|
||||
|
||||
/**
 * Build a new authority value from the info part of a generated token.
 * When an ORCID iD is supplied, the full record is fetched from the ORCID
 * service; otherwise an empty value with a fresh id is created.
 *
 * @param info the ORCID iD embedded in the token, possibly blank
 * @return the resolved or newly created authority value
 */
@Override
public AuthorityValue newInstance(String info) {
    AuthorityValue authorityValue = null;
    if (StringUtils.isNotBlank(info)) {
        // Look the person up remotely by ORCID iD.
        Orcid orcid = Orcid.getOrcid();
        authorityValue = orcid.queryAuthorityID(info);
    } else {
        authorityValue = OrcidAuthorityValue.create();
    }
    return authorityValue;
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
|
||||
|
||||
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
 * Consistent with equals(): based solely on the ORCID iD.
 */
@Override
public int hashCode() {
    return orcid_id != null ? orcid_id.hashCode() : 0;
}
|
||||
|
||||
/**
 * Unlike equals(), which only compares the ORCID iD, this checks whether
 * two values carry the same information (used e.g. to decide whether
 * lastModified must be bumped).
 *
 * NOTE(review): the metadata comparison is one-directional — labels present
 * only on "that" are not detected; confirm whether that asymmetry is
 * intended before relying on it.
 *
 * @param o object to compare with
 * @return true when all compared information matches
 */
public boolean hasTheSameInformationAs(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    if (!super.hasTheSameInformationAs(o)) {
        return false;
    }

    OrcidAuthorityValue that = (OrcidAuthorityValue) o;

    if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
        return false;
    }

    // Compare metadata per label as sets, i.e. order-insensitively.
    for (String key : otherMetadata.keySet()) {
        if(otherMetadata.get(key) != null){
            List<String> metadata = otherMetadata.get(key);
            // Local deliberately shadows the field: the other object's list.
            List<String> otherMetadata = that.otherMetadata.get(key);
            if (otherMetadata == null) {
                return false;
            } else {
                HashSet<String> metadataSet = new HashSet<String>(metadata);
                HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
                if (!metadataSet.equals(otherMetadataSet)) {
                    return false;
                }
            }
        }else{
            if(that.otherMetadata.get(key) != null){
                return false;
            }
        }
    }

    return true;
}
|
||||
}
|
185
dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv2.java
Normal file
185
dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv2.java
Normal file
@@ -0,0 +1,185 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.SolrAuthorityInterface;
|
||||
import org.dspace.authority.orcid.xml.XMLtoBio;
|
||||
import org.dspace.authority.rest.RESTConnector;
|
||||
import org.json.JSONObject;
|
||||
import org.orcid.jaxb.model.record_v2.Person;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* @author Jonas Van Goolen (jonas at atmire dot com)
|
||||
* This class contains all methods for retrieving "Person" objects calling the ORCID (version 2) endpoints.
|
||||
* Additionally, this can also create AuthorityValues based on these returned Person objects
|
||||
*/
|
||||
public class Orcidv2 implements SolrAuthorityInterface {
|
||||
|
||||
private static Logger log = Logger.getLogger(Orcidv2.class);
|
||||
|
||||
public RESTConnector restConnector;
|
||||
private String OAUTHUrl;
|
||||
private String clientId;
|
||||
|
||||
private String clientSecret;
|
||||
|
||||
private String accessToken;
|
||||
|
||||
/**
|
||||
* Initialize the accessToken that is required for all subsequent calls to ORCID
|
||||
*/
|
||||
public void init() throws IOException {
|
||||
if (StringUtils.isNotBlank(accessToken) && StringUtils.isNotBlank(clientSecret)) {
|
||||
String authenticationParameters = "?client_id=" + clientId + "&client_secret=" + clientSecret + "&scope=/read-public&grant_type=client_credentials";
|
||||
HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
|
||||
httpPost.addHeader("Accept", "application/json");
|
||||
httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");
|
||||
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpResponse getResponse = httpClient.execute(httpPost);
|
||||
|
||||
InputStream is = getResponse.getEntity().getContent();
|
||||
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
|
||||
|
||||
JSONObject responseObject = null;
|
||||
String inputStr;
|
||||
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
|
||||
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
|
||||
try {
|
||||
responseObject = new JSONObject(inputStr);
|
||||
} catch (Exception e) {
|
||||
//Not as valid as I'd hoped, move along
|
||||
responseObject = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (responseObject != null && responseObject.has("access_token")) {
|
||||
accessToken = (String) responseObject.get("access_token");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes an instance of the Orcidv2 class based on the provided parameters.
|
||||
* This constructor is called through the spring bean initialization
|
||||
*/
|
||||
private Orcidv2(String url, String OAUTHUrl, String clientId, String clientSecret) {
|
||||
this.restConnector = new RESTConnector(url);
|
||||
this.OAUTHUrl = OAUTHUrl;
|
||||
this.clientId = clientId;
|
||||
this.clientSecret = clientSecret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes an instance of the Orcidv2 class based on the provided parameters.
|
||||
* This constructor is called through the spring bean initialization
|
||||
*/
|
||||
private Orcidv2(String url) {
|
||||
this.restConnector = new RESTConnector(url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes an instance of the AuthorityValue with the given information.
|
||||
* @param text search string
|
||||
* @return List<AuthorityValue>
|
||||
*/
|
||||
@Override
|
||||
public List<AuthorityValue> queryAuthorities(String text, int max) {
|
||||
List<Person> bios = queryBio(text, max);
|
||||
List<AuthorityValue> result = new ArrayList<>();
|
||||
for (Person person : bios) {
|
||||
AuthorityValue orcidAuthorityValue = Orcidv2AuthorityValue.create(person);
|
||||
if (orcidAuthorityValue != null) {
|
||||
result.add(orcidAuthorityValue);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an AuthorityValue from a Person retrieved using the given orcid identifier.
|
||||
* @param id orcid identifier
|
||||
* @return AuthorityValue
|
||||
*/
|
||||
public AuthorityValue queryAuthorityID(String id) {
|
||||
Person person = getBio(id);
|
||||
AuthorityValue valueFromPerson = Orcidv2AuthorityValue.create(person);
|
||||
return valueFromPerson;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a Person object based on a given orcid identifier
|
||||
* @param id orcid identifier
|
||||
* @return Person
|
||||
*/
|
||||
public Person getBio(String id) {
|
||||
log.debug("getBio called with ID=" + id);
|
||||
if(!isValid(id)){
|
||||
return null;
|
||||
}
|
||||
InputStream bioDocument = restConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
Person person = converter.convertSinglePerson(bioDocument);
|
||||
return person;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieve a list of Person objects.
|
||||
* @param text search string
|
||||
* @param start offset to use
|
||||
* @param rows how many rows to return
|
||||
* @return List<Person>
|
||||
*/
|
||||
public List<Person> queryBio(String text, int start, int rows) {
|
||||
if (rows > 100) {
|
||||
throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
|
||||
}
|
||||
|
||||
String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
|
||||
log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
|
||||
InputStream bioDocument = restConnector.get(searchPath, accessToken);
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
List<Person> bios = converter.convert(bioDocument);
|
||||
return bios;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a list of Person objects.
|
||||
* @param text search string
|
||||
* @param max how many rows to return
|
||||
* @return List<Person>
|
||||
*/
|
||||
public List<Person> queryBio(String text, int max) {
|
||||
return queryBio(text, 0, max);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check to see if the provided text has the correct ORCID syntax.
|
||||
* Since only searching on ORCID id is allowed, this way, we filter out any queries that would return a blank result anyway
|
||||
*/
|
||||
private boolean isValid(String text) {
|
||||
return StringUtils.isNotBlank(text) && text.matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX);
|
||||
}
|
||||
}
|
@@ -0,0 +1,330 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.AuthorityValueGenerator;
|
||||
import org.dspace.authority.PersonAuthorityValue;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.orcid.jaxb.model.common_v2.ExternalId;
|
||||
import org.orcid.jaxb.model.record_v2.*;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* @author Jonas Van Goolen (jonas at atmire dot com)
|
||||
*/
|
||||
public class Orcidv2AuthorityValue extends PersonAuthorityValue {
|
||||
|
||||
/*
|
||||
* The ORCID identifier
|
||||
*/
|
||||
private String orcid_id;
|
||||
|
||||
/*
|
||||
* Map containing key-value pairs filled in by "setValues(Person person)".
|
||||
* This represents all dynamic information of the object.
|
||||
*/
|
||||
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
|
||||
|
||||
/**
|
||||
* The syntax that the ORCID id needs to conform to
|
||||
*/
|
||||
public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";
|
||||
|
||||
|
||||
/**
|
||||
* Creates an instance of Orcidv2AuthorityValue with only uninitialized fields.
|
||||
* This is meant to be filled in with values from an existing record.
|
||||
* To create a brand new Orcidv2AuthorityValue, use create()
|
||||
*/
|
||||
public Orcidv2AuthorityValue() {
|
||||
}
|
||||
|
||||
public Orcidv2AuthorityValue(SolrDocument document) {
|
||||
super(document);
|
||||
}
|
||||
|
||||
|
||||
public String getOrcid_id() {
|
||||
return orcid_id;
|
||||
}
|
||||
|
||||
public void setOrcid_id(String orcid_id) {
|
||||
this.orcid_id = orcid_id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an empty authority.
|
||||
* @return OrcidAuthorityValue
|
||||
*/
|
||||
public static Orcidv2AuthorityValue create() {
|
||||
Orcidv2AuthorityValue orcidAuthorityValue = new Orcidv2AuthorityValue();
|
||||
orcidAuthorityValue.setId(UUID.randomUUID().toString());
|
||||
orcidAuthorityValue.updateLastModifiedDate();
|
||||
orcidAuthorityValue.setCreationDate(new Date());
|
||||
return orcidAuthorityValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an authority based on a given orcid bio
|
||||
* @return OrcidAuthorityValue
|
||||
*/
|
||||
public static Orcidv2AuthorityValue create(Person person) {
|
||||
if (person == null) {
|
||||
return null;
|
||||
}
|
||||
Orcidv2AuthorityValue authority = Orcidv2AuthorityValue.create();
|
||||
|
||||
authority.setValues(person);
|
||||
|
||||
return authority;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize this instance based on a Person object
|
||||
* @param person Person
|
||||
*/
|
||||
protected void setValues(Person person) {
|
||||
NameType name = person.getName();
|
||||
|
||||
if (!StringUtils.equals(name.getPath(), this.getOrcid_id())) {
|
||||
this.setOrcid_id(name.getPath());
|
||||
}
|
||||
|
||||
if (!StringUtils.equals(name.getFamilyName().getValue(), this.getLastName())) {
|
||||
this.setLastName(name.getFamilyName().getValue());
|
||||
}
|
||||
|
||||
if (!StringUtils.equals(name.getGivenNames().getValue(), this.getFirstName())) {
|
||||
this.setFirstName(name.getGivenNames().getValue());
|
||||
}
|
||||
|
||||
if (name.getCreditName() != null && StringUtils.isNotBlank(name.getCreditName().getValue())) {
|
||||
if (!this.getNameVariants().contains(name.getCreditName())) {
|
||||
this.addNameVariant(name.getCreditName().getValue());
|
||||
}
|
||||
}
|
||||
|
||||
if (person.getKeywords() != null) {
|
||||
for (KeywordType keyword : person.getKeywords().getKeyword()) {
|
||||
if (this.isNewMetadata("keyword", keyword.getContent())) {
|
||||
this.addOtherMetadata("keyword", keyword.getContent());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ExternalIdentifiers externalIdentifiers = person.getExternalIdentifiers();
|
||||
if (externalIdentifiers != null) {
|
||||
for (ExternalId externalIdentifier : externalIdentifiers.getExternalIdentifier()) {
|
||||
if (this.isNewMetadata("external_identifier", externalIdentifier.getExternalIdValue())) {
|
||||
this.addOtherMetadata("external_identifier", externalIdentifier.getExternalIdValue());
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
if (person.getResearcherUrls() != null) {
|
||||
for (ResearcherUrlType researcherUrl : person.getResearcherUrls().getResearcherUrl()) {
|
||||
if (this.isNewMetadata("researcher_url", researcherUrl.getUrl().getValue())) {
|
||||
this.addOtherMetadata("researcher_url", researcherUrl.getUrl().getValue());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if (person.getBiography() != null) {
|
||||
if (this.isNewMetadata("biography", person.getBiography().getContent())) {
|
||||
this.addOtherMetadata("biography", person.getBiography().getContent());
|
||||
}
|
||||
}
|
||||
|
||||
this.setValue(this.getName());
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes an instance of the AuthorityValue with the given information.
|
||||
* @param info string info
|
||||
* @return AuthorityValue
|
||||
*/
|
||||
@Override
|
||||
public AuthorityValue newInstance(String info) {
|
||||
AuthorityValue authorityValue = null;
|
||||
if (StringUtils.isNotBlank(info)) {
|
||||
Orcidv2 orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
|
||||
authorityValue = orcid.queryAuthorityID(info);
|
||||
} else {
|
||||
authorityValue = this.create();
|
||||
}
|
||||
return authorityValue;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setValue(String value) {
|
||||
super.setValue(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check to see if the provided label / data pair is already present in the "otherMetadata" or not
|
||||
* */
|
||||
public boolean isNewMetadata(String label, String data) {
|
||||
List<String> strings = getOtherMetadata().get(label);
|
||||
boolean update;
|
||||
if (strings == null) {
|
||||
update = StringUtils.isNotBlank(data);
|
||||
} else {
|
||||
update = !strings.contains(data);
|
||||
}
|
||||
return update;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add additional metadata to the otherMetadata map*/
|
||||
public void addOtherMetadata(String label, String data) {
|
||||
List<String> strings = otherMetadata.get(label);
|
||||
if (strings == null) {
|
||||
strings = new ArrayList<>();
|
||||
}
|
||||
strings.add(data);
|
||||
otherMetadata.put(label, strings);
|
||||
}
|
||||
|
||||
public Map<String, List<String>> getOtherMetadata() {
|
||||
return otherMetadata;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate a solr record from this instance
|
||||
* @return SolrInputDocument
|
||||
*/
|
||||
@Override
|
||||
public SolrInputDocument getSolrInputDocument() {
|
||||
SolrInputDocument doc = super.getSolrInputDocument();
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
doc.addField("orcid_id", getOrcid_id());
|
||||
}
|
||||
|
||||
for (String t : otherMetadata.keySet()) {
|
||||
List<String> data = otherMetadata.get(t);
|
||||
for (String data_entry : data) {
|
||||
doc.addField("label_" + t, data_entry);
|
||||
}
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Information that can be used the choice ui
|
||||
* @return map
|
||||
*/
|
||||
@Override
|
||||
public Map<String, String> choiceSelectMap() {
|
||||
|
||||
Map<String, String> map = super.choiceSelectMap();
|
||||
|
||||
String orcid_id = getOrcid_id();
|
||||
if (StringUtils.isNotBlank(orcid_id)) {
|
||||
map.put("orcid", orcid_id);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAuthorityType() {
|
||||
return "orcid";
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a string that will allow this AuthorityType to be recognized and provides information to create a new instance to be created using public Orcidv2AuthorityValue newInstance(String info).
|
||||
* @return see {@link org.dspace.authority.AuthorityValueGenerator#GENERATE AuthorityValueGenerator.GENERATE}
|
||||
*/
|
||||
@Override
|
||||
public String generateString() {
|
||||
String generateString = AuthorityValueGenerator.GENERATE + getAuthorityType() + AuthorityValueGenerator.SPLIT;
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
generateString += getOrcid_id();
|
||||
}
|
||||
return generateString;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
|
||||
|
||||
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return orcid_id != null ? orcid_id.hashCode() : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* The regular equals() only checks if both AuthorityValues describe the same authority.
|
||||
* This method checks if the AuthorityValues have different information
|
||||
* E.g. it is used to decide when lastModified should be updated.
|
||||
* @param o object
|
||||
* @return true or false
|
||||
*/
|
||||
@Override
|
||||
public boolean hasTheSameInformationAs(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
if (!super.hasTheSameInformationAs(o)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
|
||||
|
||||
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (String key : otherMetadata.keySet()) {
|
||||
if (otherMetadata.get(key) != null) {
|
||||
List<String> metadata = otherMetadata.get(key);
|
||||
List<String> otherMetadata = that.otherMetadata.get(key);
|
||||
if (otherMetadata == null) {
|
||||
return false;
|
||||
} else {
|
||||
HashSet<String> metadataSet = new HashSet<String>(metadata);
|
||||
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
|
||||
if (!metadataSet.equals(otherMetadataSet)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (that.otherMetadata.get(key) != null) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
@@ -1,113 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Bio {

    // The ORCID iD this biography belongs to.
    protected String orcid;

    // Structured name: given/family/credit/other names.
    protected BioName name;

    protected String country;

    // LinkedHashSets: insertion order preserved, duplicates collapsed.
    protected Set<String> keywords;

    protected Set<BioExternalIdentifier> bioExternalIdentifiers;

    protected Set<BioResearcherUrl> researcherUrls;

    protected String biography;

    /**
     * Creates an empty Bio with an empty name and empty collections.
     */
    public Bio() {
        this.name = new BioName();
        keywords = new LinkedHashSet<String>();
        bioExternalIdentifiers = new LinkedHashSet<BioExternalIdentifier>();
        researcherUrls = new LinkedHashSet<BioResearcherUrl>();
    }

    public String getOrcid() {
        return orcid;
    }

    public void setOrcid(String orcid) {
        this.orcid = orcid;
    }

    public BioName getName() {
        return name;
    }

    public void setName(BioName name) {
        this.name = name;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    // NOTE(review): returns the live internal set; callers can mutate it.
    public Set<String> getKeywords() {
        return keywords;
    }

    public void addKeyword(String keyword) {
        this.keywords.add(keyword);
    }

    public Set<BioExternalIdentifier> getBioExternalIdentifiers() {
        return bioExternalIdentifiers;
    }

    public void addExternalIdentifier(BioExternalIdentifier externalReference) {
        bioExternalIdentifiers.add(externalReference);
    }

    public Set<BioResearcherUrl> getResearcherUrls() {
        return researcherUrls;
    }

    public void addResearcherUrl(BioResearcherUrl researcherUrl) {
        researcherUrls.add(researcherUrl);
    }

    public String getBiography() {
        return biography;
    }

    public void setBiography(String biography) {
        this.biography = biography;
    }

    @Override
    public String toString() {
        return "Bio{" +
                "orcid='" + orcid + '\'' +
                ", name=" + name +
                ", country='" + country + '\'' +
                ", keywords=" + keywords +
                ", bioExternalIdentifiers=" + bioExternalIdentifiers +
                ", researcherUrls=" + researcherUrls +
                ", biography='" + biography + '\'' +
                '}';
    }
}
|
||||
|
@@ -1,109 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class BioExternalIdentifier {
|
||||
|
||||
|
||||
protected String id_orcid;
|
||||
protected String id_common_name;
|
||||
protected String id_reference;
|
||||
protected String id_url;
|
||||
|
||||
public BioExternalIdentifier(String id_orcid, String id_common_name, String id_reference, String id_url) {
|
||||
this.id_orcid = id_orcid;
|
||||
this.id_common_name = id_common_name;
|
||||
this.id_reference = id_reference;
|
||||
this.id_url = id_url;
|
||||
}
|
||||
|
||||
public String getId_orcid() {
|
||||
return id_orcid;
|
||||
}
|
||||
|
||||
public void setId_orcid(String id_orcid) {
|
||||
this.id_orcid = id_orcid;
|
||||
}
|
||||
|
||||
public String getId_common_name() {
|
||||
return id_common_name;
|
||||
}
|
||||
|
||||
public void setId_common_name(String id_common_name) {
|
||||
this.id_common_name = id_common_name;
|
||||
}
|
||||
|
||||
public String getId_reference() {
|
||||
return id_reference;
|
||||
}
|
||||
|
||||
public void setId_reference(String id_reference) {
|
||||
this.id_reference = id_reference;
|
||||
}
|
||||
|
||||
public String getId_url() {
|
||||
return id_url;
|
||||
}
|
||||
|
||||
public void setId_url(String id_url) {
|
||||
this.id_url = id_url;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BioExternalIdentifier{" +
|
||||
"id_orcid='" + id_orcid + '\'' +
|
||||
", id_common_name='" + id_common_name + '\'' +
|
||||
", id_reference='" + id_reference + '\'' +
|
||||
", id_url='" + id_url + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BioExternalIdentifier that = (BioExternalIdentifier) o;
|
||||
|
||||
if (id_common_name != null ? !id_common_name.equals(that.id_common_name) : that.id_common_name != null) {
|
||||
return false;
|
||||
}
|
||||
if (id_orcid != null ? !id_orcid.equals(that.id_orcid) : that.id_orcid != null) {
|
||||
return false;
|
||||
}
|
||||
if (id_reference != null ? !id_reference.equals(that.id_reference) : that.id_reference != null) {
|
||||
return false;
|
||||
}
|
||||
if (id_url != null ? !id_url.equals(that.id_url) : that.id_url != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = id_orcid != null ? id_orcid.hashCode() : 0;
|
||||
result = 31 * result + (id_common_name != null ? id_common_name.hashCode() : 0);
|
||||
result = 31 * result + (id_reference != null ? id_reference.hashCode() : 0);
|
||||
result = 31 * result + (id_url != null ? id_url.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,115 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
/**
 * Name information from an ORCID bio: given names, family name, credit name
 * and any other name variants. Equality and hashing consider all fields.
 */
public class BioName {

    protected String givenNames;
    protected String familyName;
    protected String creditName;
    protected List<String> otherNames;

    BioName() {
        otherNames = new ArrayList<String>();
    }

    BioName(String givenNames, String familyName, String creditName, List<String> otherNames) {
        this.givenNames = givenNames;
        this.familyName = familyName;
        this.creditName = creditName;
        this.otherNames = otherNames;
    }

    public String getGivenNames() {
        return givenNames;
    }

    public void setGivenNames(String givenNames) {
        this.givenNames = givenNames;
    }

    public String getFamilyName() {
        return familyName;
    }

    public void setFamilyName(String familyName) {
        this.familyName = familyName;
    }

    public String getCreditName() {
        return creditName;
    }

    public void setCreditName(String creditName) {
        this.creditName = creditName;
    }

    public List<String> getOtherNames() {
        return otherNames;
    }

    public void setOtherNames(List<String> otherNames) {
        this.otherNames = otherNames;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("BioName{");
        text.append("givenNames='").append(givenNames).append('\'');
        text.append(", familyName='").append(familyName).append('\'');
        text.append(", creditName='").append(creditName).append('\'');
        text.append(", otherNames=").append(otherNames);
        text.append('}');
        return text.toString();
    }

    /** Null-safe comparison used by equals(). */
    private static boolean sameValue(Object left, Object right) {
        return left == null ? right == null : left.equals(right);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        BioName other = (BioName) o;
        return sameValue(creditName, other.creditName)
                && sameValue(familyName, other.familyName)
                && sameValue(givenNames, other.givenNames)
                && sameValue(otherNames, other.otherNames);
    }

    @Override
    public int hashCode() {
        int result = givenNames != null ? givenNames.hashCode() : 0;
        result = 31 * result + (familyName != null ? familyName.hashCode() : 0);
        result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
        result = 31 * result + (otherNames != null ? otherNames.hashCode() : 0);
        return result;
    }
}
|
@@ -1,78 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class BioResearcherUrl {
|
||||
|
||||
protected String name;
|
||||
protected String url;
|
||||
|
||||
public BioResearcherUrl(String name, String url) {
|
||||
this.name = name;
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BioResearcherUrl{" +
|
||||
"name='" + name + '\'' +
|
||||
", url='" + url + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BioResearcherUrl that = (BioResearcherUrl) o;
|
||||
|
||||
if (name != null ? !name.equals(that.name) : that.name != null) {
|
||||
return false;
|
||||
}
|
||||
if (url != null ? !url.equals(that.url) : that.url != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = name != null ? name.hashCode() : 0;
|
||||
result = 31 * result + (url != null ? url.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,50 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Citation {
|
||||
|
||||
private CitationType type;
|
||||
private String citation;
|
||||
|
||||
public Citation(CitationType type, String citation) {
|
||||
this.type = type;
|
||||
this.citation = citation;
|
||||
}
|
||||
|
||||
public CitationType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(CitationType type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public String getCitation() {
|
||||
return citation;
|
||||
}
|
||||
|
||||
public void setCitation(String citation) {
|
||||
this.citation = citation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Citation{" +
|
||||
"type=" + type +
|
||||
", citation='" + citation + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
@@ -1,29 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public enum CitationType {
|
||||
|
||||
FORMATTED_UNSPECIFIED,
|
||||
BIBTEX,
|
||||
FORMATTED_APA,
|
||||
FORMATTED_HARVARD,
|
||||
FORMATTED_IEEE,
|
||||
FORMATTED_MLA,
|
||||
FORMATTED_VANCOUVER,
|
||||
FORMATTED_CHICAGO
|
||||
|
||||
}
|
@@ -1,111 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Contributor {
|
||||
|
||||
private String orcid;
|
||||
private String creditName;
|
||||
private String email;
|
||||
private Set<ContributorAttribute> contributorAttributes;
|
||||
|
||||
public Contributor(String orcid, String creditName, String email, Set<ContributorAttribute> contributorAttributes) {
|
||||
this.orcid = orcid;
|
||||
this.creditName = creditName;
|
||||
this.email = email;
|
||||
this.contributorAttributes = contributorAttributes;
|
||||
}
|
||||
|
||||
public String getOrcid() {
|
||||
return orcid;
|
||||
}
|
||||
|
||||
public void setOrcid(String orcid) {
|
||||
this.orcid = orcid;
|
||||
}
|
||||
|
||||
public String getCreditName() {
|
||||
return creditName;
|
||||
}
|
||||
|
||||
public void setCreditName(String creditName) {
|
||||
this.creditName = creditName;
|
||||
}
|
||||
|
||||
public String getEmail() {
|
||||
return email;
|
||||
}
|
||||
|
||||
public void setEmail(String email) {
|
||||
this.email = email;
|
||||
}
|
||||
|
||||
public Set<ContributorAttribute> getContributorAttributes() {
|
||||
return contributorAttributes;
|
||||
}
|
||||
|
||||
public void setContributorAttributes(Set<ContributorAttribute> contributorAttributes) {
|
||||
this.contributorAttributes = contributorAttributes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Contributor{" +
|
||||
"orcid='" + orcid + '\'' +
|
||||
", creditName='" + creditName + '\'' +
|
||||
", email='" + email + '\'' +
|
||||
", contributorAttributes=" + contributorAttributes +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Contributor that = (Contributor) o;
|
||||
|
||||
if (contributorAttributes != null ? !contributorAttributes.equals(that.contributorAttributes) : that.contributorAttributes != null) {
|
||||
return false;
|
||||
}
|
||||
if (creditName != null ? !creditName.equals(that.creditName) : that.creditName != null) {
|
||||
return false;
|
||||
}
|
||||
if (email != null ? !email.equals(that.email) : that.email != null) {
|
||||
return false;
|
||||
}
|
||||
if (orcid != null ? !orcid.equals(that.orcid) : that.orcid != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = orcid != null ? orcid.hashCode() : 0;
|
||||
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
|
||||
result = 31 * result + (email != null ? email.hashCode() : 0);
|
||||
result = 31 * result + (contributorAttributes != null ? contributorAttributes.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,79 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class ContributorAttribute {
|
||||
|
||||
private ContributorAttributeRole role;
|
||||
private ContributorAttributeSequence sequence;
|
||||
|
||||
public ContributorAttribute(ContributorAttributeRole role, ContributorAttributeSequence sequence) {
|
||||
this.role = role;
|
||||
this.sequence = sequence;
|
||||
}
|
||||
|
||||
public ContributorAttributeRole getRole() {
|
||||
return role;
|
||||
}
|
||||
|
||||
public void setRole(ContributorAttributeRole role) {
|
||||
this.role = role;
|
||||
}
|
||||
|
||||
public ContributorAttributeSequence getSequence() {
|
||||
return sequence;
|
||||
}
|
||||
|
||||
public void setSequence(ContributorAttributeSequence sequence) {
|
||||
this.sequence = sequence;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ContributorAttribute{" +
|
||||
"role=" + role +
|
||||
", sequence=" + sequence +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ContributorAttribute that = (ContributorAttribute) o;
|
||||
|
||||
if (role != that.role) {
|
||||
return false;
|
||||
}
|
||||
if (sequence != that.sequence) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = role != null ? role.hashCode() : 0;
|
||||
result = 31 * result + (sequence != null ? sequence.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,32 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public enum ContributorAttributeRole {
|
||||
|
||||
AUTHOR,
|
||||
ASSIGNEE,
|
||||
EDITOR,
|
||||
CHAIR_OR_TRANSLATOR,
|
||||
CO_INVESTIGATOR,
|
||||
CO_INVENTOR,
|
||||
GRADUATE_STUDENT,
|
||||
OTHER_INVENTOR,
|
||||
PRINCIPAL_INVESTIGATOR,
|
||||
POSTDOCTORAL_RESEARCHER,
|
||||
SUPPORT_STAFF
|
||||
|
||||
}
|
@@ -1,23 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public enum ContributorAttributeSequence {
|
||||
|
||||
FIRST,
|
||||
ADDITIONAL
|
||||
|
||||
}
|
@@ -1,117 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Work {
|
||||
|
||||
private WorkTitle workTitle;
|
||||
private String description;
|
||||
private Citation citation;
|
||||
private WorkType workType;
|
||||
private String publicationDate;
|
||||
private WorkExternalIdentifier workExternalIdentifier;
|
||||
private String url;
|
||||
private Set<Contributor> contributors;
|
||||
private String workSource;
|
||||
|
||||
public WorkTitle getWorkTitle() {
|
||||
return workTitle;
|
||||
}
|
||||
|
||||
public void setWorkTitle(WorkTitle workTitle) {
|
||||
this.workTitle = workTitle;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Citation getCitation() {
|
||||
return citation;
|
||||
}
|
||||
|
||||
public void setCitation(Citation citation) {
|
||||
this.citation = citation;
|
||||
}
|
||||
|
||||
public WorkType getWorkType() {
|
||||
return workType;
|
||||
}
|
||||
|
||||
public void setWorkType(WorkType workType) {
|
||||
this.workType = workType;
|
||||
}
|
||||
|
||||
public String getPublicationDate() {
|
||||
return publicationDate;
|
||||
}
|
||||
|
||||
public void setPublicationDate(String publicationDate) {
|
||||
this.publicationDate = publicationDate;
|
||||
}
|
||||
|
||||
public WorkExternalIdentifier getWorkExternalIdentifier() {
|
||||
return workExternalIdentifier;
|
||||
}
|
||||
|
||||
public void setWorkExternalIdentifier(WorkExternalIdentifier workExternalIdentifier) {
|
||||
this.workExternalIdentifier = workExternalIdentifier;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public Set<Contributor> getContributors() {
|
||||
return contributors;
|
||||
}
|
||||
|
||||
public void setContributors(Set<Contributor> contributors) {
|
||||
this.contributors = contributors;
|
||||
}
|
||||
|
||||
public String getWorkSource() {
|
||||
return workSource;
|
||||
}
|
||||
|
||||
public void setWorkSource(String workSource) {
|
||||
this.workSource = workSource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Work{" +
|
||||
"workTitle=" + workTitle +
|
||||
", description='" + description + '\'' +
|
||||
", citation=" + citation +
|
||||
", workType=" + workType +
|
||||
", publicationDate='" + publicationDate + '\'' +
|
||||
", workExternalIdentifier=" + workExternalIdentifier +
|
||||
", url='" + url + '\'' +
|
||||
", contributors=" + contributors +
|
||||
", workSource='" + workSource + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
@@ -1,71 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
* http://support.orcid.org/knowledgebase/articles/118807
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class WorkExternalIdentifier {
|
||||
|
||||
private WorkExternalIdentifierType workExternalIdentifierType;
|
||||
private String workExternalIdenfitierID;
|
||||
|
||||
public WorkExternalIdentifier(WorkExternalIdentifierType workExternalIdentifierType, String workExternalIdenfitierID) {
|
||||
this.workExternalIdentifierType = workExternalIdentifierType;
|
||||
this.workExternalIdenfitierID = workExternalIdenfitierID;
|
||||
}
|
||||
|
||||
public WorkExternalIdentifierType getWorkExternalIdentifierType() {
|
||||
return workExternalIdentifierType;
|
||||
}
|
||||
|
||||
public void setWorkExternalIdentifierType(WorkExternalIdentifierType workExternalIdentifierType) {
|
||||
this.workExternalIdentifierType = workExternalIdentifierType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "WorkExternalIdentifier{" +
|
||||
"workExternalIdentifierType=" + workExternalIdentifierType +
|
||||
", workExternalIdenfitierID='" + workExternalIdenfitierID + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
WorkExternalIdentifier that = (WorkExternalIdentifier) o;
|
||||
|
||||
if (workExternalIdenfitierID != null ? !workExternalIdenfitierID.equals(that.workExternalIdenfitierID) : that.workExternalIdenfitierID != null) {
|
||||
return false;
|
||||
}
|
||||
if (workExternalIdentifierType != that.workExternalIdentifierType) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = workExternalIdentifierType != null ? workExternalIdentifierType.hashCode() : 0;
|
||||
result = 31 * result + (workExternalIdenfitierID != null ? workExternalIdenfitierID.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,42 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
* http://support.orcid.org/knowledgebase/articles/118807
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public enum WorkExternalIdentifierType {
|
||||
|
||||
// OTHER_ID,
|
||||
ARXIV,
|
||||
ASIN,
|
||||
ASIN_TLD,
|
||||
BIBCODE,
|
||||
DOI,
|
||||
EID,
|
||||
ISBN,
|
||||
ISSN,
|
||||
JFM,
|
||||
JSTOR,
|
||||
LCCN,
|
||||
MR,
|
||||
OCLC,
|
||||
OL,
|
||||
OSTI,
|
||||
PMC,
|
||||
PMID,
|
||||
RFC,
|
||||
SSRN,
|
||||
ZBL
|
||||
|
||||
}
|
@@ -1,64 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* http://support.orcid.org/knowledgebase/articles/118807
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class WorkTitle {
|
||||
|
||||
private String title;
|
||||
private String subtitle;
|
||||
private Map<String, String> translatedTitles;
|
||||
|
||||
public WorkTitle(String title, String subtitle, Map<String, String> translatedTitles) {
|
||||
this.title = title;
|
||||
this.subtitle = subtitle;
|
||||
this.translatedTitles = translatedTitles;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
public void setTitle(String title) {
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
public String getSubtitle() {
|
||||
return subtitle;
|
||||
}
|
||||
|
||||
public void setSubtitle(String subtitle) {
|
||||
this.subtitle = subtitle;
|
||||
}
|
||||
|
||||
public String getTranslatedTitles(String languageCode) {
|
||||
return translatedTitles.get(languageCode);
|
||||
}
|
||||
|
||||
public void setTranslatedTitle(String languageCode, String translatedTitle) {
|
||||
translatedTitles.put(languageCode, translatedTitle);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "WorkTitle{" +
|
||||
"title='" + title + '\'' +
|
||||
", subtitle='" + subtitle + '\'' +
|
||||
", translatedTitles=" + translatedTitles +
|
||||
'}';
|
||||
}
|
||||
}
|
@@ -1,57 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
* http://support.orcid.org/knowledgebase/articles/118795
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public enum WorkType {
|
||||
|
||||
BOOK,
|
||||
BOOK_CHAPTER,
|
||||
BOOK_REVIEW,
|
||||
DICTIONARY_ENTRY,
|
||||
DISSERTATION,
|
||||
ENCYCLOPEDIA_ARTICLE,
|
||||
EDITED_BOOK,
|
||||
JOURNAL_ARTICLE,
|
||||
JOURNAL_ISSUE,
|
||||
MAGAZINE_ARTICLE,
|
||||
MANUAL,
|
||||
ONLINE_RESOURCE,
|
||||
NEWSLETTER_ARTICLE,
|
||||
NEWSPAPER_ARTICLE,
|
||||
REPORT,
|
||||
RESEARCH_TOOL,
|
||||
SUPERVISED_STUDENT_PUBLICATION,
|
||||
TEST,
|
||||
TRANSLATION,
|
||||
WEBSITE,
|
||||
CONFERENCE_ABSTRACT,
|
||||
CONFERENCE_PAPER,
|
||||
CONFERENCE_POSTER,
|
||||
DISCLOSURE,
|
||||
LICENSE,
|
||||
PATENT,
|
||||
REGISTERED_COPYRIGHT,
|
||||
ARTISTIC_PERFORMANCE,
|
||||
DATA_SET,
|
||||
INVENTION,
|
||||
LECTURE_SPEECH,
|
||||
RESEARCH_TECHNIQUE,
|
||||
SPIN_OFF_COMPANY,
|
||||
STANDARDS_AND_POLICY,
|
||||
TECHNICAL_STANDARD,
|
||||
OTHER
|
||||
|
||||
}
|
@@ -8,7 +8,13 @@
|
||||
package org.dspace.authority.orcid.xml;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.w3c.dom.Document;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
import javax.xml.bind.JAXBContext;
|
||||
import javax.xml.bind.JAXBException;
|
||||
import javax.xml.bind.Unmarshaller;
|
||||
import java.io.InputStream;
|
||||
import java.net.URISyntaxException;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -24,11 +30,15 @@ public abstract class Converter<T> {
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(Converter.class);
|
||||
|
||||
public abstract T convert(InputStream document);
|
||||
|
||||
protected void processError(Document xml) {
|
||||
String errorMessage = XMLErrors.getErrorMessage(xml);
|
||||
log.error("The orcid-message reports an error: " + errorMessage);
|
||||
protected Object unmarshall(InputStream input, Class<?> type) throws SAXException, URISyntaxException {
|
||||
try {
|
||||
JAXBContext context = JAXBContext.newInstance(type);
|
||||
Unmarshaller unmarshaller = context.createUnmarshaller();
|
||||
return unmarshaller.unmarshal(input);
|
||||
} catch (JAXBException e) {
|
||||
throw new RuntimeException("Unable to unmarshall orcid message" + e);
|
||||
}
|
||||
}
|
||||
|
||||
public abstract T convert(Document document);
|
||||
}
|
||||
|
@@ -1,73 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.xml;
|
||||
|
||||
import org.dspace.authority.util.XMLUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.w3c.dom.Document;
|
||||
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class XMLErrors {
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(XMLErrors.class);
|
||||
|
||||
private static final String ERROR_DESC = "/orcid-message/error-desc";
|
||||
|
||||
/**
|
||||
* Evaluates whether a given xml document contains errors or not.
|
||||
*
|
||||
* @param xml The given xml document
|
||||
* @return true if the given xml document is null
|
||||
* or if it contains errors
|
||||
*/
|
||||
public static boolean check(Document xml) {
|
||||
|
||||
if (xml == null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
String textContent = null;
|
||||
|
||||
try {
|
||||
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error while checking for errors in orcid message", e);
|
||||
}
|
||||
|
||||
return textContent == null;
|
||||
}
|
||||
|
||||
public static String getErrorMessage(Document xml) {
|
||||
|
||||
if (xml == null) {
|
||||
return "Did not receive an XML document.";
|
||||
}
|
||||
|
||||
String textContent = null;
|
||||
|
||||
try {
|
||||
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error while checking for errors in orcid message", e);
|
||||
}
|
||||
|
||||
return textContent;
|
||||
}
|
||||
|
||||
}
|
@@ -7,23 +7,22 @@
|
||||
*/
|
||||
package org.dspace.authority.orcid.xml;
|
||||
|
||||
import org.dspace.authority.orcid.model.Bio;
|
||||
import org.dspace.authority.orcid.model.BioExternalIdentifier;
|
||||
import org.dspace.authority.orcid.model.BioName;
|
||||
import org.dspace.authority.orcid.model.BioResearcherUrl;
|
||||
import org.dspace.authority.util.XMLUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
import org.w3c.dom.NodeList;
|
||||
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authority.orcid.Orcidv2;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.orcid.jaxb.model.common_v2.OrcidId;
|
||||
import org.orcid.jaxb.model.record_v2.Person;
|
||||
import org.orcid.jaxb.model.search_v2.Result;
|
||||
import org.orcid.jaxb.model.search_v2.Search;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
@@ -36,216 +35,39 @@ public class XMLtoBio extends Converter {
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(XMLtoBio.class);
|
||||
|
||||
/**
|
||||
* orcid-message XPATHs
|
||||
*/
|
||||
|
||||
protected String ORCID_BIO = "//orcid-bio";
|
||||
|
||||
// protected String ORCID = "parent::*/orcid";
|
||||
protected String ORCID = "parent::*/orcid-identifier/path";
|
||||
|
||||
protected String PERSONAL_DETAILS = "personal-details";
|
||||
protected String GIVEN_NAMES = PERSONAL_DETAILS + "/given-names";
|
||||
protected String FAMILY_NAME = PERSONAL_DETAILS + "/family-name";
|
||||
protected String CREDIT_NAME = PERSONAL_DETAILS + "/credit-name";
|
||||
protected String OTHER_NAMES = PERSONAL_DETAILS + "/other-names";
|
||||
protected String OTHER_NAME = OTHER_NAMES + "/other-name";
|
||||
|
||||
protected String CONTACT_DETAILS = "contact-details";
|
||||
protected String COUNTRY = CONTACT_DETAILS + "/address/country";
|
||||
|
||||
protected String KEYWORDS = "keywords";
|
||||
protected String KEYWORD = KEYWORDS + "/keyword";
|
||||
|
||||
protected String EXTERNAL_IDENTIFIERS = "external-identifiers";
|
||||
protected String EXTERNAL_IDENTIFIER = EXTERNAL_IDENTIFIERS + "/external-identifier";
|
||||
protected String EXTERNAL_ID_ORCID = "external-id-orcid";
|
||||
protected String EXTERNAL_ID_COMMNON_NAME = "external-id-common-name";
|
||||
protected String EXTERNAL_ID_REFERENCE = "external-id-reference";
|
||||
protected String EXTERNAL_ID_URL = "external-id-url";
|
||||
|
||||
protected String RESEARCHER_URLS = "researcher-urls";
|
||||
protected String RESEARCHER_URL = "researcher-urls/researcher-url";
|
||||
protected String URL_NAME = "url-name";
|
||||
protected String URL = "url";
|
||||
|
||||
protected String BIOGRAPHY = ORCID_BIO + "/biography";
|
||||
|
||||
protected String AFFILIATIONS = ORCID_BIO + "/affiliation";
|
||||
|
||||
/**
|
||||
* Regex
|
||||
*/
|
||||
|
||||
protected String ORCID_NOT_FOUND = "ORCID [\\d-]* not found";
|
||||
|
||||
|
||||
public List<Bio> convert(Document xml) {
|
||||
List<Bio> result = new ArrayList<Bio>();
|
||||
|
||||
if (XMLErrors.check(xml)) {
|
||||
|
||||
try {
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, ORCID_BIO);
|
||||
while (iterator.hasNext()) {
|
||||
Bio bio = convertBio(iterator.next());
|
||||
result.add(bio);
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in xpath syntax", e);
|
||||
}
|
||||
} else {
|
||||
processError(xml);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private Bio convertBio(Node node) {
|
||||
Bio bio = new Bio();
|
||||
|
||||
setOrcid(node,bio);
|
||||
setPersonalDetails(node, bio);
|
||||
setContactDetails(node, bio);
|
||||
setKeywords(node, bio);
|
||||
setExternalIdentifiers(node, bio);
|
||||
setResearcherUrls(node, bio);
|
||||
setBiography(node, bio);
|
||||
|
||||
return bio;
|
||||
}
|
||||
|
||||
protected void processError(Document xml) {
|
||||
String errorMessage = XMLErrors.getErrorMessage(xml);
|
||||
|
||||
if(errorMessage.matches(ORCID_NOT_FOUND))
|
||||
{
|
||||
// do something?
|
||||
}
|
||||
|
||||
log.error("The orcid-message reports an error: " + errorMessage);
|
||||
}
|
||||
|
||||
|
||||
private void setOrcid(Node node, Bio bio) {
|
||||
@Override
|
||||
public List<Person> convert(InputStream xml) {
|
||||
List<Person> bios= new ArrayList<>();
|
||||
try {
|
||||
String orcid = XMLUtils.getTextContent(node, ORCID);
|
||||
bio.setOrcid(orcid);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.debug("Error in finding the biography in bio xml.", e);
|
||||
}
|
||||
}
|
||||
Orcidv2 connector = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
|
||||
|
||||
protected void setBiography(Node xml, Bio bio) {
|
||||
try {
|
||||
String biography = XMLUtils.getTextContent(xml, BIOGRAPHY);
|
||||
bio.setBiography(biography);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the biography in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setResearcherUrls(Node xml, Bio bio) {
|
||||
try {
|
||||
NodeList researcher_urls = XMLUtils.getNodeList(xml, RESEARCHER_URL);
|
||||
if (researcher_urls != null) {
|
||||
for (int i = 0; i < researcher_urls.getLength(); i++) {
|
||||
Node researcher_url = researcher_urls.item(i);
|
||||
if (researcher_url.getNodeType() != Node.TEXT_NODE) {
|
||||
String url_name = XMLUtils.getTextContent(researcher_url, URL_NAME);
|
||||
String url = XMLUtils.getTextContent(researcher_url, URL);
|
||||
BioResearcherUrl researcherUrl = new BioResearcherUrl(url_name, url);
|
||||
bio.addResearcherUrl(researcherUrl);
|
||||
Search search = (Search) unmarshall(xml, Search.class);
|
||||
for(Result result : search.getResult()){
|
||||
OrcidId orcidIdentifier = result.getOrcidIdentifier();
|
||||
if(orcidIdentifier!=null){
|
||||
log.debug("Found OrcidId=" + orcidIdentifier.toString());
|
||||
String orcid = orcidIdentifier.getUriPath();
|
||||
Person bio = connector.getBio(orcid);
|
||||
if(bio!=null){
|
||||
bios.add(bio);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the researcher url in bio xml.", e);
|
||||
} catch (SAXException | URISyntaxException e) {
|
||||
log.error(e);
|
||||
}
|
||||
return bios;
|
||||
}
|
||||
|
||||
protected void setExternalIdentifiers(Node xml, Bio bio) {
|
||||
public Person convertSinglePerson(InputStream xml) {
|
||||
Person person = null;
|
||||
try {
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, EXTERNAL_IDENTIFIER);
|
||||
while (iterator.hasNext()) {
|
||||
Node external_identifier = iterator.next();
|
||||
String id_orcid = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_ORCID);
|
||||
String id_common_name = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_COMMNON_NAME);
|
||||
String id_reference = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_REFERENCE);
|
||||
String id_url = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_URL);
|
||||
BioExternalIdentifier externalIdentifier = new BioExternalIdentifier(id_orcid, id_common_name, id_reference, id_url);
|
||||
bio.addExternalIdentifier(externalIdentifier);
|
||||
}
|
||||
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the external identifier in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setKeywords(Node xml, Bio bio) {
|
||||
try {
|
||||
NodeList keywords = XMLUtils.getNodeList(xml, KEYWORD);
|
||||
if (keywords != null) {
|
||||
for (int i = 0; i < keywords.getLength(); i++) {
|
||||
String keyword = keywords.item(i).getTextContent();
|
||||
String[] split = keyword.split(",");
|
||||
for (String k : split) {
|
||||
bio.addKeyword(k.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the keywords in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setContactDetails(Node xml, Bio bio) {
|
||||
try {
|
||||
String country = XMLUtils.getTextContent(xml, COUNTRY);
|
||||
bio.setCountry(country);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the country in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setPersonalDetails(Node xml, Bio bio) {
|
||||
BioName name = bio.getName();
|
||||
|
||||
try {
|
||||
String givenNames = XMLUtils.getTextContent(xml, GIVEN_NAMES);
|
||||
name.setGivenNames(givenNames);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the given names in bio xml.", e);
|
||||
}
|
||||
|
||||
try {
|
||||
String familyName = XMLUtils.getTextContent(xml, FAMILY_NAME);
|
||||
name.setFamilyName(familyName);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the family name in bio xml.", e);
|
||||
}
|
||||
|
||||
try {
|
||||
String creditName = XMLUtils.getTextContent(xml, CREDIT_NAME);
|
||||
name.setCreditName(creditName);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the credit name in bio xml.", e);
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, OTHER_NAME);
|
||||
while (iterator.hasNext()) {
|
||||
Node otherName = iterator.next();
|
||||
String textContent = otherName.getTextContent();
|
||||
name.getOtherNames().add(textContent.trim());
|
||||
}
|
||||
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the other names in bio xml.", e);
|
||||
person = (Person) unmarshall(xml, Person.class);
|
||||
return person;
|
||||
} catch (SAXException | URISyntaxException e) {
|
||||
log.error(e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -1,239 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.xml;
|
||||
|
||||
import org.dspace.authority.orcid.model.*;
|
||||
import org.dspace.authority.util.*;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
import org.w3c.dom.NodeList;
|
||||
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class XMLtoWork extends Converter {
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(XMLtoWork.class);
|
||||
|
||||
/**
|
||||
* orcid-message XPATHs
|
||||
*/
|
||||
|
||||
protected String ORCID_WORKS = "//orcid-works";
|
||||
protected String ORCID_WORK = ORCID_WORKS + "/orcid-work";
|
||||
|
||||
protected String WORK_TITLE = "work-title";
|
||||
protected String TITLE = WORK_TITLE + "/title";
|
||||
protected String SUBTITLE = WORK_TITLE + "/subtitle";
|
||||
protected String TRANSLATED_TITLES = WORK_TITLE + "/translated-title";
|
||||
protected String TRANSLATED_TITLES_LANGUAGE = "@language-code";
|
||||
|
||||
protected String SHORT_DESCRIPTION = "short-description";
|
||||
|
||||
protected String WORK_CITATION = "work-citation";
|
||||
protected String CITATION_TYPE = WORK_CITATION + "/work-citation-type";
|
||||
protected String CITATION = WORK_CITATION + "/citation";
|
||||
|
||||
protected String WORK_TYPE = "work-type";
|
||||
|
||||
protected String PUBLICATION_DATE = "publication-date";
|
||||
protected String YEAR = PUBLICATION_DATE + "/year";
|
||||
protected String MONTH = PUBLICATION_DATE + "/month";
|
||||
protected String DAY = PUBLICATION_DATE + "/day";
|
||||
|
||||
protected String WORK_EXTERNAL_IDENTIFIERS = "work-external-identifiers";
|
||||
protected String WORK_EXTERNAL_IDENTIFIER = WORK_EXTERNAL_IDENTIFIERS + "/work-external-identifier";
|
||||
protected String WORK_EXTERNAL_IDENTIFIER_TYPE = "work-external-identifier-type";
|
||||
protected String WORK_EXTERNAL_IDENTIFIER_ID = "work-external-identifier-id";
|
||||
|
||||
protected String URL = "url";
|
||||
|
||||
protected String WORK_CONTRIBUTOR = "work-contributors";
|
||||
protected String CONTRIBUTOR = WORK_CONTRIBUTOR+"/contributor";
|
||||
protected String CONTRIBUTOR_ORCID = "contributor-orcid";
|
||||
protected String CREDIT_NAME = "credit-name";
|
||||
protected String CONTRIBUTOR_EMAIL = "contributor-email";
|
||||
protected String CONTRIBUTOR_ATTRIBUTES = "contributor-attributes";
|
||||
protected String CONTRIBUTOR_SEQUENCE = "contributor-sequence";
|
||||
protected String CONTRIBUTOR_ROLE = "contributor-role";
|
||||
|
||||
protected String WORK_SOURCE = "work-source";
|
||||
|
||||
|
||||
public List<Work> convert(Document document) {
|
||||
List<Work> result = new ArrayList<Work>();
|
||||
|
||||
if (XMLErrors.check(document)) {
|
||||
|
||||
try {
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(document, ORCID_WORK);
|
||||
while (iterator.hasNext()) {
|
||||
Work work = convertWork(iterator.next());
|
||||
result.add(work);
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in xpath syntax", e);
|
||||
}
|
||||
} else {
|
||||
processError(document);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected Work convertWork(Node node) throws XPathExpressionException {
|
||||
Work work = new Work();
|
||||
setTitle(node, work);
|
||||
setDescription(node, work);
|
||||
setCitation(node, work);
|
||||
setWorkType(node, work);
|
||||
setPublicationDate(node, work);
|
||||
setExternalIdentifiers(node, work);
|
||||
setUrl(node, work);
|
||||
setContributors(node, work);
|
||||
setWorkSource(node, work);
|
||||
|
||||
return work;
|
||||
}
|
||||
|
||||
protected void setWorkSource(Node node, Work work) throws XPathExpressionException {
|
||||
String workSource = XMLUtils.getTextContent(node, WORK_SOURCE);
|
||||
work.setWorkSource(workSource);
|
||||
}
|
||||
|
||||
protected void setContributors(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
Set<Contributor> contributors = new HashSet<Contributor>();
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, CONTRIBUTOR);
|
||||
while (iterator.hasNext()) {
|
||||
Node nextContributorNode = iterator.next();
|
||||
String orcid = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_ORCID);
|
||||
String creditName = XMLUtils.getTextContent(nextContributorNode, CREDIT_NAME);
|
||||
String email = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_EMAIL);
|
||||
|
||||
Set<ContributorAttribute> contributorAttributes = new HashSet<ContributorAttribute>();
|
||||
NodeList attributeNodes = XMLUtils.getNodeList(nextContributorNode, CONTRIBUTOR_ATTRIBUTES);
|
||||
Iterator<Node> attributesIterator = XMLUtils.getNodeListIterator(attributeNodes);
|
||||
while (attributesIterator.hasNext()) {
|
||||
Node nextAttribute = attributesIterator.next();
|
||||
|
||||
String roleText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_ROLE);
|
||||
ContributorAttributeRole role = EnumUtils.lookup(ContributorAttributeRole.class, roleText);
|
||||
|
||||
String sequenceText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_SEQUENCE);
|
||||
ContributorAttributeSequence sequence = EnumUtils.lookup(ContributorAttributeSequence.class, sequenceText);
|
||||
|
||||
ContributorAttribute attribute = new ContributorAttribute(role, sequence);
|
||||
contributorAttributes.add(attribute);
|
||||
}
|
||||
|
||||
Contributor contributor = new Contributor(orcid, creditName, email, contributorAttributes);
|
||||
contributors.add(contributor);
|
||||
}
|
||||
|
||||
work.setContributors(contributors);
|
||||
}
|
||||
|
||||
protected void setUrl(Node node, Work work) throws XPathExpressionException {
|
||||
String url = XMLUtils.getTextContent(node, URL);
|
||||
work.setUrl(url);
|
||||
}
|
||||
|
||||
protected void setExternalIdentifiers(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, WORK_EXTERNAL_IDENTIFIER);
|
||||
while (iterator.hasNext()) {
|
||||
Node work_external_identifier = iterator.next();
|
||||
String typeText = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_TYPE);
|
||||
|
||||
WorkExternalIdentifierType type = EnumUtils.lookup(WorkExternalIdentifierType.class, typeText);
|
||||
|
||||
String id = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_ID);
|
||||
|
||||
WorkExternalIdentifier externalID = new WorkExternalIdentifier(type, id);
|
||||
work.setWorkExternalIdentifier(externalID);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setPublicationDate(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String year = XMLUtils.getTextContent(node, YEAR);
|
||||
String month = XMLUtils.getTextContent(node, MONTH);
|
||||
String day = XMLUtils.getTextContent(node, DAY);
|
||||
|
||||
String publicationDate = year;
|
||||
if (StringUtils.isNotBlank(month)) {
|
||||
publicationDate += "-" + month;
|
||||
if (StringUtils.isNotBlank(day)) {
|
||||
publicationDate += "-" + day;
|
||||
}
|
||||
}
|
||||
|
||||
work.setPublicationDate(publicationDate);
|
||||
}
|
||||
|
||||
protected void setWorkType(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String workTypeText = XMLUtils.getTextContent(node, WORK_TYPE);
|
||||
WorkType workType = EnumUtils.lookup(WorkType.class, workTypeText);
|
||||
|
||||
work.setWorkType(workType);
|
||||
}
|
||||
|
||||
protected void setCitation(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String typeText = XMLUtils.getTextContent(node, CITATION_TYPE);
|
||||
CitationType type = EnumUtils.lookup(CitationType.class, typeText);
|
||||
|
||||
String citationtext = XMLUtils.getTextContent(node, CITATION);
|
||||
|
||||
Citation citation = new Citation(type, citationtext);
|
||||
work.setCitation(citation);
|
||||
}
|
||||
|
||||
protected void setDescription(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String description = null;
|
||||
description = XMLUtils.getTextContent(node, SHORT_DESCRIPTION);
|
||||
work.setDescription(description);
|
||||
}
|
||||
|
||||
protected void setTitle(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String title = XMLUtils.getTextContent(node, TITLE);
|
||||
|
||||
String subtitle = XMLUtils.getTextContent(node, SUBTITLE);
|
||||
|
||||
Map<String, String> translatedTitles = new HashMap<String, String>();
|
||||
NodeList nodeList = XMLUtils.getNodeList(node, TRANSLATED_TITLES);
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(nodeList);
|
||||
while (iterator.hasNext()) {
|
||||
Node languageNode = iterator.next();
|
||||
String language = XMLUtils.getTextContent(languageNode, TRANSLATED_TITLES_LANGUAGE);
|
||||
String translated_title = XMLUtils.getTextContent(languageNode, ".");
|
||||
translatedTitles.put(language, translated_title);
|
||||
}
|
||||
|
||||
WorkTitle workTitle = new WorkTitle(title, subtitle, translatedTitles);
|
||||
work.setWorkTitle(workTitle);
|
||||
}
|
||||
|
||||
}
|
@@ -7,13 +7,12 @@
|
||||
*/
|
||||
package org.dspace.authority.rest;
|
||||
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.dspace.authority.util.XMLUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.w3c.dom.Document;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.Scanner;
|
||||
@@ -38,26 +37,28 @@ public class RESTConnector {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public Document get(String path) {
|
||||
Document document = null;
|
||||
public InputStream get(String path, String accessToken) {
|
||||
|
||||
InputStream result = null;
|
||||
path = trimSlashes(path);
|
||||
|
||||
String fullPath = url + '/' + path;
|
||||
HttpGet httpGet = new HttpGet(fullPath);
|
||||
if(StringUtils.isNotBlank(accessToken)){
|
||||
httpGet.addHeader("Content-Type", "application/vnd.orcid+xml");
|
||||
httpGet.addHeader("Authorization","Bearer "+accessToken);
|
||||
}
|
||||
try {
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpResponse getResponse = httpClient.execute(httpGet);
|
||||
//do not close this httpClient
|
||||
result = getResponse.getEntity().getContent();
|
||||
document = XMLUtils.convertStreamToXML(result);
|
||||
|
||||
} catch (Exception e) {
|
||||
getGotError(e, fullPath);
|
||||
}
|
||||
|
||||
return document;
|
||||
return result;
|
||||
}
|
||||
|
||||
protected void getGotError(Exception e, String fullPath) {
|
||||
|
@@ -7,9 +7,7 @@
|
||||
*/
|
||||
package org.dspace.authority.rest;
|
||||
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
|
||||
import java.util.List;
|
||||
import org.dspace.authority.SolrAuthorityInterface;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -18,21 +16,11 @@ import java.util.List;
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public abstract class RestSource {
|
||||
public abstract class RestSource implements SolrAuthorityInterface {
|
||||
|
||||
protected RESTConnector restConnector;
|
||||
|
||||
public RestSource(String url) {
|
||||
this.restConnector = new RESTConnector(url);
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO
|
||||
* com.atmire.org.dspace.authority.rest.RestSource#queryAuthorities -> add field, so the source can decide whether to query /users or something else.
|
||||
* -> implement subclasses
|
||||
* -> implement usages
|
||||
*/
|
||||
public abstract List<AuthorityValue> queryAuthorities(String text, int max);
|
||||
|
||||
public abstract AuthorityValue queryAuthorityID(String id);
|
||||
}
|
||||
|
@@ -20,6 +20,7 @@ import org.dspace.eperson.Group;
|
||||
import org.dspace.storage.rdbms.DatabaseManager;
|
||||
import org.dspace.storage.rdbms.TableRow;
|
||||
import org.dspace.storage.rdbms.TableRowIterator;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
|
||||
/**
|
||||
* AuthorizeManager handles all authorization checks for DSpace. For better
|
||||
@@ -295,8 +296,43 @@ public class AuthorizeManager
|
||||
}
|
||||
}
|
||||
|
||||
// In case the dso is an bundle or bitstream we must ignore custom
|
||||
// policies if it does not belong to at least one installed item (see
|
||||
// DS-2614).
|
||||
// In case the dso is an item and a corresponding workspace or workflow
|
||||
// item exist, we have to ignore custom policies (see DS-2614).
|
||||
boolean ignoreCustomPolicies = false;
|
||||
if (o instanceof Bitstream)
|
||||
{
|
||||
Bitstream b = (Bitstream) o;
|
||||
|
||||
// Ensure that this is not a collection or community logo
|
||||
DSpaceObject parent = b.getParentObject();
|
||||
if (!(parent instanceof Collection) && !(parent instanceof Community))
|
||||
{
|
||||
ignoreCustomPolicies = !isAnyItemInstalled(c, b.getBundles());
|
||||
}
|
||||
}
|
||||
if (o instanceof Bundle)
|
||||
{
|
||||
ignoreCustomPolicies = !isAnyItemInstalled(c, new Bundle[] {(Bundle) o});
|
||||
}
|
||||
if (o instanceof Item)
|
||||
{
|
||||
if (WorkspaceItem.findByItem(c, (Item) o) != null ||
|
||||
WorkflowItem.findByItem(c, (Item) o) != null)
|
||||
{
|
||||
ignoreCustomPolicies = true;
|
||||
}
|
||||
}
|
||||
|
||||
for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action))
|
||||
{
|
||||
if (ignoreCustomPolicies
|
||||
&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
// check policies for date validity
|
||||
if (rp.isDateValid())
|
||||
{
|
||||
@@ -306,7 +342,7 @@ public class AuthorizeManager
|
||||
}
|
||||
|
||||
if ((rp.getGroupID() != -1)
|
||||
&& (Group.isMember(c, rp.getGroupID())))
|
||||
&& (Group.isMember(c, e, rp.getGroupID())))
|
||||
{
|
||||
// group was set, and eperson is a member
|
||||
// of that group
|
||||
@@ -318,7 +354,26 @@ public class AuthorizeManager
|
||||
// default authorization is denial
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// check whether any bundle belongs to any item that passed submission
|
||||
// and workflow process
|
||||
protected static boolean isAnyItemInstalled(Context ctx, Bundle[] bundles)
|
||||
throws SQLException
|
||||
{
|
||||
for (Bundle bundle : bundles)
|
||||
{
|
||||
for (Item item : bundle.getItems())
|
||||
{
|
||||
if (WorkspaceItem.findByItem(ctx, item) == null
|
||||
&& WorkflowItem.findByItem(ctx, item) == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////
|
||||
// admin check methods
|
||||
///////////////////////////////////////////////
|
||||
@@ -480,7 +535,9 @@ public class AuthorizeManager
|
||||
|
||||
rp.update();
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -534,8 +591,10 @@ public class AuthorizeManager
|
||||
rp.setRpType(type);
|
||||
|
||||
rp.update();
|
||||
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -799,7 +858,9 @@ public class AuthorizeManager
|
||||
drp.update();
|
||||
}
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
dest.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -815,12 +876,14 @@ public class AuthorizeManager
|
||||
public static void removeAllPolicies(Context c, DSpaceObject o)
|
||||
throws SQLException
|
||||
{
|
||||
o.updateLastModified();
|
||||
|
||||
// FIXME: authorization check?
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? ",
|
||||
o.getType(), o.getID());
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -837,7 +900,7 @@ public class AuthorizeManager
|
||||
throws SQLException
|
||||
{
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? AND rptype <> ? ",
|
||||
+ "resource_type_id= ? AND resource_id= ? AND (rptype <> ? OR rptype IS NULL)",
|
||||
o.getType(), o.getID(), type);
|
||||
}
|
||||
|
||||
@@ -861,6 +924,29 @@ public class AuthorizeManager
|
||||
+ "resource_type_id= ? AND resource_id= ? AND rptype=? ",
|
||||
o.getType(), o.getID(), type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Change all the policies related to the action (fromPolicy) of the
|
||||
* specified object to the new action (toPolicy)
|
||||
*
|
||||
* @param context
|
||||
* @param dso
|
||||
* the dspace object
|
||||
* @param fromAction
|
||||
* the action to change
|
||||
* @param toAction
|
||||
* the new action to set
|
||||
* @throws SQLException
|
||||
* @throws AuthorizeException
|
||||
*/
|
||||
public static void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
|
||||
throws SQLException, AuthorizeException {
|
||||
List<ResourcePolicy> rps = getPoliciesActionFilter(context, dso, fromAction);
|
||||
for (ResourcePolicy rp : rps) {
|
||||
rp.setAction(toAction);
|
||||
rp.update();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all policies from an object that match a given action. FIXME
|
||||
@@ -879,7 +965,6 @@ public class AuthorizeManager
|
||||
public static void removePoliciesActionFilter(Context context,
|
||||
DSpaceObject dso, int actionID) throws SQLException
|
||||
{
|
||||
dso.updateLastModified();
|
||||
if (actionID == -1)
|
||||
{
|
||||
// remove all policies from object
|
||||
@@ -891,6 +976,10 @@ public class AuthorizeManager
|
||||
"resource_id= ? AND action_id= ? ",
|
||||
dso.getType(), dso.getID(), actionID);
|
||||
}
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
dso.updateLastModified();
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -927,11 +1016,13 @@ public class AuthorizeManager
|
||||
public static void removeGroupPolicies(Context c, DSpaceObject o, Group g)
|
||||
throws SQLException
|
||||
{
|
||||
o.updateLastModified();
|
||||
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? AND epersongroup_id= ? ",
|
||||
o.getType(), o.getID(), g.getID());
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -950,10 +1041,13 @@ public class AuthorizeManager
|
||||
public static void removeEPersonPolicies(Context c, DSpaceObject o, EPerson e)
|
||||
throws SQLException
|
||||
{
|
||||
o.updateLastModified();
|
||||
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
|
||||
+ "resource_type_id= ? AND resource_id= ? AND eperson_id= ? ",
|
||||
o.getType(), o.getID(), e.getID());
|
||||
|
||||
c.turnOffAuthorisationSystem();
|
||||
o.updateLastModified();
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -36,7 +36,7 @@ public class FixDefaultPolicies
|
||||
Context c = new Context();
|
||||
|
||||
// turn off authorization
|
||||
c.setIgnoreAuthorization(true);
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
//////////////////////
|
||||
// carnage begins here
|
||||
|
@@ -63,7 +63,7 @@ public class PolicySet
|
||||
Context c = new Context();
|
||||
|
||||
// turn off authorization
|
||||
c.setIgnoreAuthorization(true);
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
//////////////////////
|
||||
// carnage begins here
|
||||
|
@@ -354,7 +354,7 @@ public class BrowserScope
|
||||
*/
|
||||
public void setResultsPerPage(int resultsPerPage)
|
||||
{
|
||||
if (resultsPerPage > -1 || browseIndex.isTagCloudEnabled())
|
||||
if (resultsPerPage > -1 || (browseIndex != null && browseIndex.isTagCloudEnabled()))
|
||||
{
|
||||
this.resultsPerPage = resultsPerPage;
|
||||
}
|
||||
|
@@ -32,6 +32,8 @@ import org.dspace.utils.DSpace;
|
||||
*
|
||||
* @author Andrea Bollini (CILEA)
|
||||
* @author Adán Román Ruiz at arvo.es (bugfix)
|
||||
* @author Panagiotis Koutsourakis (National Documentation Centre) (bugfix)
|
||||
* @author Kostas Stamatis (National Documentation Centre) (bugfix)
|
||||
*
|
||||
*/
|
||||
public class SolrBrowseDAO implements BrowseDAO
|
||||
@@ -336,6 +338,22 @@ public class SolrBrowseDAO implements BrowseDAO
|
||||
addStatusFilter(query);
|
||||
query.setMaxResults(0);
|
||||
query.addFilterQueries("search.resourcetype:" + Constants.ITEM);
|
||||
|
||||
// We need to take into account the fact that we may be in a subset of the items
|
||||
if (authority != null)
|
||||
{
|
||||
query.addFilterQueries("{!field f="+facetField + "_authority_filter}"
|
||||
+ authority);
|
||||
}
|
||||
else if (this.value != null && !valuePartial)
|
||||
{
|
||||
query.addFilterQueries("{!field f="+facetField + "_value_filter}" + this.value);
|
||||
}
|
||||
else if (valuePartial)
|
||||
{
|
||||
query.addFilterQueries("{!field f="+facetField + "_partial}" + this.value);
|
||||
}
|
||||
|
||||
if (isAscending)
|
||||
{
|
||||
query.setQuery("bi_"+column + "_sort" + ": [* TO \"" + value + "\"}");
|
||||
@@ -343,6 +361,7 @@ public class SolrBrowseDAO implements BrowseDAO
|
||||
else
|
||||
{
|
||||
query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]");
|
||||
query.addFilterQueries("-(bi_" + column + "_sort" + ":" + value + "*)");
|
||||
}
|
||||
boolean includeUnDiscoverable = itemsWithdrawn || !itemsDiscoverable;
|
||||
DiscoverResult resp = null;
|
||||
|
@@ -28,6 +28,7 @@ import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
|
||||
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.PreparedStatement;
|
||||
@@ -47,7 +48,6 @@ import java.util.*;
|
||||
* effect.
|
||||
*
|
||||
* @author Robert Tansley
|
||||
* @version $Revision$
|
||||
*/
|
||||
public class Collection extends DSpaceObject
|
||||
{
|
||||
@@ -294,31 +294,48 @@ public class Collection extends DSpaceObject
|
||||
* @return the collections in the system
|
||||
* @throws SQLException
|
||||
*/
|
||||
public static Collection[] findAll(Context context) throws SQLException {
|
||||
public static Collection[] findAll(Context context) throws SQLException
|
||||
{
|
||||
TableRowIterator tri = null;
|
||||
try {
|
||||
String query = "SELECT c.* FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
|
||||
if(DatabaseManager.isOracle()){
|
||||
query += " ORDER BY cast(m.text_value as varchar2(128))";
|
||||
}else{
|
||||
query += " ORDER BY m.text_value";
|
||||
}
|
||||
List<Collection> collections = null;
|
||||
List<Serializable> params = new ArrayList<Serializable>();
|
||||
StringBuffer query = new StringBuffer(
|
||||
"SELECT c.*" +
|
||||
"FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m ON (" +
|
||||
"m.resource_id = c.collection_id AND " +
|
||||
"m.resource_type_id = ? AND " +
|
||||
"m.metadata_field_id = ?" +
|
||||
")"
|
||||
);
|
||||
|
||||
tri = DatabaseManager.query(context,
|
||||
query,
|
||||
Constants.COLLECTION,
|
||||
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID()
|
||||
);
|
||||
} catch (SQLException e) {
|
||||
log.error("Find all Collections - ",e);
|
||||
throw e;
|
||||
if (DatabaseManager.isOracle())
|
||||
{
|
||||
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
|
||||
}
|
||||
else
|
||||
{
|
||||
query.append(" ORDER BY m.text_value");
|
||||
}
|
||||
|
||||
List<Collection> collections = new ArrayList<Collection>();
|
||||
params.add(Constants.COLLECTION);
|
||||
params.add(
|
||||
MetadataField.findByElement(
|
||||
context,
|
||||
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
|
||||
"title",
|
||||
null
|
||||
).getFieldID()
|
||||
);
|
||||
|
||||
try
|
||||
{
|
||||
tri = DatabaseManager.query(
|
||||
context, query.toString(), params.toArray()
|
||||
);
|
||||
|
||||
collections = new ArrayList<Collection>();
|
||||
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
@@ -337,6 +354,11 @@ public class Collection extends DSpaceObject
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
log.error("Find all Collections - ", e);
|
||||
throw e;
|
||||
}
|
||||
finally
|
||||
{
|
||||
// close the TableRowIterator to free up resources
|
||||
@@ -363,31 +385,47 @@ public class Collection extends DSpaceObject
|
||||
public static Collection[] findAll(Context context, Integer limit, Integer offset) throws SQLException
|
||||
{
|
||||
TableRowIterator tri = null;
|
||||
try{
|
||||
String query = "SELECT c.* FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
|
||||
List<Collection> collections = null;
|
||||
List<Serializable> params = new ArrayList<Serializable>();
|
||||
StringBuffer query = new StringBuffer(
|
||||
"SELECT c.*" +
|
||||
"FROM collection c " +
|
||||
"LEFT JOIN metadatavalue m ON (" +
|
||||
"m.resource_id = c.collection_id AND " +
|
||||
"m.resource_type_id = ? AND " +
|
||||
"m.metadata_field_id = ?" +
|
||||
")"
|
||||
);
|
||||
|
||||
if(DatabaseManager.isOracle()){
|
||||
query += " ORDER BY cast(m.text_value as varchar2(128))";
|
||||
}else{
|
||||
query += " ORDER BY m.text_value";
|
||||
}
|
||||
query += " limit ? offset ?";
|
||||
tri = DatabaseManager.query(context,
|
||||
query,
|
||||
Constants.COLLECTION,
|
||||
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID(),
|
||||
limit,
|
||||
offset
|
||||
);
|
||||
} catch (SQLException e) {
|
||||
log.error("Find all Collections offset/limit - ",e);
|
||||
throw e;
|
||||
if (DatabaseManager.isOracle())
|
||||
{
|
||||
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
|
||||
}
|
||||
List<Collection> collections = new ArrayList<Collection>();
|
||||
else
|
||||
{
|
||||
query.append(" ORDER BY m.text_value");
|
||||
}
|
||||
|
||||
params.add(Constants.COLLECTION);
|
||||
params.add(
|
||||
MetadataField.findByElement(
|
||||
context,
|
||||
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
|
||||
"title",
|
||||
null
|
||||
).getFieldID()
|
||||
);
|
||||
|
||||
DatabaseManager.applyOffsetAndLimit(query, params, offset, limit);
|
||||
|
||||
try
|
||||
{
|
||||
tri = DatabaseManager.query(
|
||||
context, query.toString(), params.toArray()
|
||||
);
|
||||
|
||||
collections = new ArrayList<Collection>();
|
||||
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
@@ -406,6 +444,11 @@ public class Collection extends DSpaceObject
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (SQLException e)
|
||||
{
|
||||
log.error("Find all Collections offset/limit - ", e);
|
||||
throw e;
|
||||
}
|
||||
finally
|
||||
{
|
||||
// close the TableRowIterator to free up resources
|
||||
@@ -450,13 +493,20 @@ public class Collection extends DSpaceObject
|
||||
*/
|
||||
public ItemIterator getItems(Integer limit, Integer offset) throws SQLException
|
||||
{
|
||||
String myQuery = "SELECT item.* FROM item, collection2item WHERE "
|
||||
+ "item.item_id=collection2item.item_id AND "
|
||||
+ "collection2item.collection_id= ? "
|
||||
+ "AND item.in_archive='1' limit ? offset ?";
|
||||
List<Serializable> params = new ArrayList<Serializable>();
|
||||
StringBuffer myQuery = new StringBuffer(
|
||||
"SELECT item.* " +
|
||||
"FROM item, collection2item " +
|
||||
"WHERE item.item_id = collection2item.item_id " +
|
||||
"AND collection2item.collection_id = ? " +
|
||||
"AND item.in_archive = '1'"
|
||||
);
|
||||
|
||||
TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item",
|
||||
myQuery,getID(), limit, offset);
|
||||
params.add(getID());
|
||||
DatabaseManager.applyOffsetAndLimit(myQuery, params, offset, limit);
|
||||
|
||||
TableRowIterator rows = DatabaseManager.query(ourContext,
|
||||
myQuery.toString(), params.toArray());
|
||||
|
||||
return new ItemIterator(ourContext, rows);
|
||||
}
|
||||
@@ -679,8 +729,6 @@ public class Collection extends DSpaceObject
|
||||
g.setName("COLLECTION_" + getID() + "_WORKFLOW_STEP_" + step);
|
||||
g.update();
|
||||
setWorkflowGroup(step, g);
|
||||
|
||||
AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, g);
|
||||
}
|
||||
|
||||
return workflowGroup[step - 1];
|
||||
@@ -689,26 +737,82 @@ public class Collection extends DSpaceObject
|
||||
/**
|
||||
* Set the workflow group corresponding to a particular workflow step.
|
||||
* <code>null</code> can be passed in if there should be no associated
|
||||
* group for that workflow step; any existing group is NOT deleted.
|
||||
* group for that workflow step. Any existing group is NOT deleted.
|
||||
*
|
||||
* @param step
|
||||
* the workflow step (1-3)
|
||||
* @param g
|
||||
* @param newGroup
|
||||
* the new workflow group, or <code>null</code>
|
||||
* @throws java.sql.SQLException passed through.
|
||||
* @throws org.dspace.authorize.AuthorizeException passed through.
|
||||
*/
|
||||
public void setWorkflowGroup(int step, Group g)
|
||||
public void setWorkflowGroup(int step, Group newGroup)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
workflowGroup[step - 1] = g;
|
||||
|
||||
if (g == null)
|
||||
Group oldGroup = getWorkflowGroup(step);
|
||||
String stepColumn;
|
||||
int action;
|
||||
switch(step)
|
||||
{
|
||||
collectionRow.setColumnNull("workflow_step_" + step);
|
||||
case 1:
|
||||
action = Constants.WORKFLOW_STEP_1;
|
||||
stepColumn = "workflow_step_1";
|
||||
break;
|
||||
case 2:
|
||||
action = Constants.WORKFLOW_STEP_2;
|
||||
stepColumn = "workflow_step_2";
|
||||
break;
|
||||
case 3:
|
||||
action = Constants.WORKFLOW_STEP_3;
|
||||
stepColumn = "workflow_step_3";
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Illegal step count: " + step);
|
||||
}
|
||||
workflowGroup[step-1] = newGroup;
|
||||
if (newGroup != null)
|
||||
collectionRow.setColumn(stepColumn, newGroup.getID());
|
||||
else
|
||||
{
|
||||
collectionRow.setColumn("workflow_step_" + step, g.getID());
|
||||
}
|
||||
collectionRow.setColumnNull(stepColumn);
|
||||
modified = true;
|
||||
|
||||
// Deal with permissions.
|
||||
try {
|
||||
ourContext.turnOffAuthorisationSystem();
|
||||
// remove the policies for the old group
|
||||
if (oldGroup != null)
|
||||
{
|
||||
List<ResourcePolicy> oldPolicies = AuthorizeManager
|
||||
.getPoliciesActionFilter(ourContext, this, action);
|
||||
int oldGroupID = oldGroup.getID();
|
||||
for (ResourcePolicy rp : oldPolicies)
|
||||
{
|
||||
if (rp.getGroupID() == oldGroupID)
|
||||
rp.delete();
|
||||
}
|
||||
|
||||
oldPolicies = AuthorizeManager
|
||||
.getPoliciesActionFilter(ourContext, this, Constants.ADD);
|
||||
for (ResourcePolicy rp : oldPolicies)
|
||||
{
|
||||
if ((rp.getGroupID() == oldGroupID)
|
||||
&& ResourcePolicy.TYPE_WORKFLOW.equals(rp.getRpType()))
|
||||
rp.delete();
|
||||
}
|
||||
}
|
||||
|
||||
// New group can be null to delete workflow step.
|
||||
// We need to grant permissions if new group is not null.
|
||||
if (newGroup != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(ourContext, this, action, newGroup,
|
||||
ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, newGroup,
|
||||
ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
} finally {
|
||||
ourContext.restoreAuthSystemState();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1513,7 +1617,7 @@ public class Collection extends DSpaceObject
|
||||
|
||||
public static Collection[] findAuthorizedOptimized(Context context, int actionID) throws java.sql.SQLException
|
||||
{
|
||||
if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", true)) {
|
||||
if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false)) {
|
||||
// Fallback to legacy query if config says so. The rationale could be that a site found a bug.
|
||||
return findAuthorized(context, null, actionID);
|
||||
}
|
||||
|
@@ -280,7 +280,7 @@ public class Community extends DSpaceObject
|
||||
{
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
TableRow row = tri.next(context);
|
||||
|
||||
// First check the cache
|
||||
Community fromCache = (Community) context.fromCache(
|
||||
@@ -350,7 +350,7 @@ public class Community extends DSpaceObject
|
||||
{
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
TableRow row = tri.next(context);
|
||||
|
||||
// First check the cache
|
||||
Community fromCache = (Community) context.fromCache(
|
||||
@@ -683,7 +683,7 @@ public class Community extends DSpaceObject
|
||||
{
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
TableRow row = tri.next(ourContext);
|
||||
|
||||
// First check the cache
|
||||
Collection fromCache = (Collection) ourContext.fromCache(
|
||||
@@ -757,7 +757,7 @@ public class Community extends DSpaceObject
|
||||
{
|
||||
while (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
TableRow row = tri.next(ourContext);
|
||||
|
||||
// First check the cache
|
||||
Community fromCache = (Community) ourContext.fromCache(
|
||||
@@ -812,7 +812,7 @@ public class Community extends DSpaceObject
|
||||
{
|
||||
if (tri.hasNext())
|
||||
{
|
||||
TableRow row = tri.next();
|
||||
TableRow row = tri.next(ourContext);
|
||||
|
||||
// First check the cache
|
||||
Community fromCache = (Community) ourContext.fromCache(
|
||||
|
@@ -60,6 +60,8 @@ public class InstallItem
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
Item item = is.getItem();
|
||||
Collection collection = is.getCollection();
|
||||
|
||||
IdentifierService identifierService = new DSpace().getSingletonService(IdentifierService.class);
|
||||
try {
|
||||
if(suppliedHandle == null)
|
||||
@@ -74,7 +76,15 @@ public class InstallItem
|
||||
|
||||
populateMetadata(c, item);
|
||||
|
||||
return finishItem(c, item, is);
|
||||
// Finish up / archive the item
|
||||
item = finishItem(c, item, is);
|
||||
|
||||
// As this is a BRAND NEW item, as a final step we need to remove the
|
||||
// submitter item policies created during deposit and replace them with
|
||||
// the default policies from the collection.
|
||||
item.inheritCollectionDefaultPolicies(collection);
|
||||
|
||||
return item;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -205,8 +215,18 @@ public class InstallItem
|
||||
item.addDC("description", "provenance", "en", provDescription);
|
||||
}
|
||||
|
||||
// final housekeeping when adding new Item to archive
|
||||
// common between installing and "restoring" items.
|
||||
/**
|
||||
* Final housekeeping when adding a new Item into the archive.
|
||||
* This method is used by *both* installItem() and restoreItem(),
|
||||
* so all actions here will be run for a newly added item or a restored item.
|
||||
*
|
||||
* @param c DSpace Context
|
||||
* @param item Item in question
|
||||
* @param is InProgressSubmission object
|
||||
* @return final "archived" Item
|
||||
* @throws SQLException if database error
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
private static Item finishItem(Context c, Item item, InProgressSubmission is)
|
||||
throws SQLException, IOException, AuthorizeException
|
||||
{
|
||||
@@ -229,10 +249,6 @@ public class InstallItem
|
||||
// remove in-progress submission
|
||||
is.deleteWrapper();
|
||||
|
||||
// remove the item's policies and replace them with
|
||||
// the defaults from the collection
|
||||
item.inheritCollectionDefaultPolicies(is.getCollection());
|
||||
|
||||
// set embargo lift date and take away read access if indicated.
|
||||
EmbargoManager.setEmbargo(c, item);
|
||||
|
||||
|
@@ -24,14 +24,15 @@ import org.dspace.authorize.AuthorizeManager;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.browse.BrowseException;
|
||||
import org.dspace.browse.IndexBrowse;
|
||||
import org.dspace.content.authority.ChoiceAuthorityManager;
|
||||
import org.dspace.content.authority.Choices;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.content.authority.Choices;
|
||||
import org.dspace.content.authority.ChoiceAuthorityManager;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.handle.HandleManager;
|
||||
import org.dspace.identifier.IdentifierException;
|
||||
import org.dspace.identifier.IdentifierService;
|
||||
@@ -40,6 +41,8 @@ import org.dspace.storage.rdbms.TableRow;
|
||||
import org.dspace.storage.rdbms.TableRowIterator;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.dspace.versioning.VersioningService;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
|
||||
|
||||
/**
|
||||
* Class representing an item in DSpace.
|
||||
@@ -263,7 +266,7 @@ public class Item extends DSpaceObject
|
||||
}
|
||||
|
||||
String query = "SELECT item.* FROM metadatavalue,item WHERE item.in_archive='1' " +
|
||||
"AND item.item_id = metadatavalue.item_id AND metadata_field_id = ?";
|
||||
"AND item.item_id = metadatavalue.resource_id AND metadatavalue.resource_type_id=2 AND metadata_field_id = ?";
|
||||
TableRowIterator rows = null;
|
||||
if (Item.ANY.equals(authority)) {
|
||||
rows = DatabaseManager.queryTable(context, "item", query, mdf.getFieldID());
|
||||
@@ -1158,8 +1161,14 @@ public class Item extends DSpaceObject
|
||||
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
|
||||
"WITHDRAW", getIdentifiers(ourContext)));
|
||||
|
||||
// remove all authorization policies, saving the custom ones
|
||||
AuthorizeManager.removeAllPoliciesByDSOAndTypeNotEqualsTo(ourContext, this, ResourcePolicy.TYPE_CUSTOM);
|
||||
// switch all READ authorization policies to WITHDRAWN_READ
|
||||
AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.READ, Constants.WITHDRAWN_READ);
|
||||
for (Bundle bnd : this.getBundles()) {
|
||||
AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.READ, Constants.WITHDRAWN_READ);
|
||||
for (Bitstream bs : bnd.getBitstreams()) {
|
||||
AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.READ, Constants.WITHDRAWN_READ);
|
||||
}
|
||||
}
|
||||
|
||||
// Write log
|
||||
log.info(LogManager.getHeader(ourContext, "withdraw_item", "user="
|
||||
@@ -1217,16 +1226,28 @@ public class Item extends DSpaceObject
|
||||
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
|
||||
"REINSTATE", getIdentifiers(ourContext)));
|
||||
|
||||
// authorization policies
|
||||
if (colls.length > 0)
|
||||
{
|
||||
// FIXME: not multiple inclusion friendly - just apply access
|
||||
// policies from first collection
|
||||
// remove the item's policies and replace them with
|
||||
// the defaults from the collection
|
||||
inheritCollectionDefaultPolicies(colls[0]);
|
||||
// restore all WITHDRAWN_READ authorization policies back to READ
|
||||
for (Bundle bnd : this.getBundles()) {
|
||||
AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.WITHDRAWN_READ, Constants.READ);
|
||||
for (Bitstream bs : bnd.getBitstreams()) {
|
||||
AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.WITHDRAWN_READ, Constants.READ);
|
||||
}
|
||||
}
|
||||
|
||||
// check if the item was withdrawn before the fix DS-3097
|
||||
if (AuthorizeManager.getPoliciesActionFilter(ourContext, this, Constants.WITHDRAWN_READ).size() != 0) {
|
||||
AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.WITHDRAWN_READ, Constants.READ);
|
||||
}
|
||||
|
||||
else {
|
||||
// authorization policies
|
||||
if (colls.length > 0)
|
||||
{
|
||||
// remove the item's policies and replace them with
|
||||
// the defaults from the collection
|
||||
adjustItemPolicies(getOwningCollection());
|
||||
}
|
||||
}
|
||||
|
||||
// Write log
|
||||
log.info(LogManager.getHeader(ourContext, "reinstate_item", "user="
|
||||
+ e.getEmail() + ",item_id=" + getID()));
|
||||
@@ -1254,9 +1275,6 @@ public class Item extends DSpaceObject
|
||||
log.info(LogManager.getHeader(ourContext, "delete_item", "item_id="
|
||||
+ getID()));
|
||||
|
||||
// Remove from cache
|
||||
ourContext.removeCached(this, getID());
|
||||
|
||||
// Remove from browse indices, if appropriate
|
||||
/** XXX FIXME
|
||||
** Although all other Browse index updates are managed through
|
||||
@@ -1303,6 +1321,8 @@ public class Item extends DSpaceObject
|
||||
// remove version attached to the item
|
||||
removeVersion();
|
||||
|
||||
// Remove from cache
|
||||
ourContext.removeCached(this, getID());
|
||||
|
||||
// Finally remove item row
|
||||
DatabaseManager.delete(ourContext, itemRow);
|
||||
@@ -1750,7 +1770,12 @@ public class Item extends DSpaceObject
|
||||
// is this collection not yet created, and an item template is created
|
||||
if (getOwningCollection() == null)
|
||||
{
|
||||
return true;
|
||||
if (!isInProgressSubmission()) {
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// is this person an COLLECTION_EDITOR for the owning collection?
|
||||
@@ -1762,6 +1787,20 @@ public class Item extends DSpaceObject
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the item is an inprogress submission
|
||||
* @param context
|
||||
* @param item
|
||||
* @return <code>true</code> if the item is an inprogress submission, i.e. a WorkspaceItem or WorkflowItem
|
||||
* @throws SQLException
|
||||
*/
|
||||
public boolean isInProgressSubmission() throws SQLException {
|
||||
return WorkspaceItem.findByItem(ourContext, this) != null ||
|
||||
((ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow")
|
||||
&& XmlWorkflowItem.findByItem(ourContext, this) != null)
|
||||
|| WorkflowItem.findByItem(ourContext, this) != null);
|
||||
}
|
||||
|
||||
public String getName()
|
||||
{
|
||||
return getMetadataFirstValue(MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
|
||||
|
@@ -81,6 +81,7 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
* ID of the workspace item
|
||||
*
|
||||
* @return the workspace item, or null if the ID is invalid.
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
public static WorkspaceItem find(Context context, int id)
|
||||
throws SQLException
|
||||
@@ -131,6 +132,9 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
* of the collection's template item
|
||||
*
|
||||
* @return the newly created workspace item
|
||||
* @throws org.dspace.authorize.AuthorizeException passed through.
|
||||
* @throws java.sql.SQLException passed through.
|
||||
* @throws java.io.IOException passed through.
|
||||
*/
|
||||
public static WorkspaceItem create(Context c, Collection coll,
|
||||
boolean template) throws AuthorizeException, SQLException,
|
||||
@@ -140,98 +144,49 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
AuthorizeManager.authorizeAction(c, coll, Constants.ADD);
|
||||
|
||||
// Create an item
|
||||
Item i = Item.create(c);
|
||||
i.setSubmitter(c.getCurrentUser());
|
||||
Item item = Item.create(c);
|
||||
item.setSubmitter(c.getCurrentUser());
|
||||
|
||||
// Now create the policies for the submitter and workflow
|
||||
// users to modify item and contents
|
||||
// Now create the policies for the submitter to modify item and contents.
|
||||
// contents = bitstreams, bundles
|
||||
// FIXME: icky hardcoded workflow steps
|
||||
Group step1group = coll.getWorkflowGroup(1);
|
||||
Group step2group = coll.getWorkflowGroup(2);
|
||||
Group step3group = coll.getWorkflowGroup(3);
|
||||
|
||||
EPerson e = c.getCurrentUser();
|
||||
EPerson submitter = c.getCurrentUser();
|
||||
|
||||
// read permission
|
||||
AuthorizeManager.addPolicy(c, i, Constants.READ, e, ResourcePolicy.TYPE_SUBMISSION);
|
||||
// Add policies for the submitter
|
||||
AuthorizeManager.addPolicy(c, item, Constants.READ, submitter, ResourcePolicy.TYPE_SUBMISSION);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.WRITE, submitter, ResourcePolicy.TYPE_SUBMISSION);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.ADD, submitter, ResourcePolicy.TYPE_SUBMISSION);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, submitter, ResourcePolicy.TYPE_SUBMISSION);
|
||||
|
||||
|
||||
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
|
||||
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow"))
|
||||
{
|
||||
// Add policies for the workflow step administrative groups
|
||||
if (step1group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.READ, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.READ, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.WRITE, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.ADD, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step2group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.READ, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.READ, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.WRITE, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.ADD, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step3group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.READ, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// write permission
|
||||
AuthorizeManager.addPolicy(c, i, Constants.WRITE, e, ResourcePolicy.TYPE_SUBMISSION);
|
||||
|
||||
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
|
||||
if (step1group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step2group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step3group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
}
|
||||
|
||||
// add permission
|
||||
AuthorizeManager.addPolicy(c, i, Constants.ADD, e, ResourcePolicy.TYPE_SUBMISSION);
|
||||
|
||||
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
|
||||
if (step1group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.ADD, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step2group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.ADD, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step3group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.ADD, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
}
|
||||
|
||||
// remove contents permission
|
||||
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, e, ResourcePolicy.TYPE_SUBMISSION);
|
||||
|
||||
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
|
||||
if (step1group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step1group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step2group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step2group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
|
||||
if (step3group != null)
|
||||
{
|
||||
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.READ, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.WRITE, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.ADD, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step3group, ResourcePolicy.TYPE_WORKFLOW);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -244,22 +199,22 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
|
||||
for (int n = 0; n < md.length; n++)
|
||||
{
|
||||
i.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
|
||||
item.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
|
||||
md[n].value);
|
||||
}
|
||||
}
|
||||
|
||||
i.update();
|
||||
item.update();
|
||||
|
||||
// Create the workspace item row
|
||||
TableRow row = DatabaseManager.row("workspaceitem");
|
||||
|
||||
row.setColumn("item_id", i.getID());
|
||||
row.setColumn("item_id", item.getID());
|
||||
row.setColumn("collection_id", coll.getID());
|
||||
|
||||
log.info(LogManager.getHeader(c, "create_workspace_item",
|
||||
"workspace_item_id=" + row.getIntColumn("workspace_item_id")
|
||||
+ "item_id=" + i.getID() + "collection_id="
|
||||
+ "item_id=" + item.getID() + "collection_id="
|
||||
+ coll.getID()));
|
||||
|
||||
DatabaseManager.insert(c, row);
|
||||
@@ -280,6 +235,7 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
* the eperson
|
||||
*
|
||||
* @return the corresponding workspace items
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
public static WorkspaceItem[] findByEPerson(Context context, EPerson ep)
|
||||
throws SQLException
|
||||
@@ -332,6 +288,7 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
* the collection
|
||||
*
|
||||
* @return the corresponding workspace items
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
public static WorkspaceItem[] findByCollection(Context context, Collection c)
|
||||
throws SQLException
|
||||
@@ -384,6 +341,7 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
* the item
|
||||
*
|
||||
* @return workflow item corresponding to the item, or null
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
public static WorkspaceItem findByItem(Context context, Item i)
|
||||
throws SQLException
|
||||
@@ -408,6 +366,7 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
* @param context the context object
|
||||
*
|
||||
* @return all workspace items
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
public static WorkspaceItem[] findAll(Context context)
|
||||
throws SQLException
|
||||
@@ -505,6 +464,8 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
|
||||
/**
|
||||
* Update the workspace item, including the unarchived item.
|
||||
* @throws java.sql.SQLException passed through.
|
||||
* @throws org.dspace.authorize.AuthorizeException passed through.
|
||||
*/
|
||||
public void update() throws SQLException, AuthorizeException
|
||||
{
|
||||
@@ -554,6 +515,10 @@ public class WorkspaceItem implements InProgressSubmission
|
||||
* Delete the workspace item. The entry in workspaceitem, the unarchived
|
||||
* item and its contents are all removed (multiple inclusion
|
||||
* notwithstanding.)
|
||||
* @throws java.sql.SQLException passed through.
|
||||
* @throws org.dspace.authorize.AuthorizeException
|
||||
* if not original submitter or an administrator.
|
||||
* @throws java.io.IOException passed through.
|
||||
*/
|
||||
public void deleteAll() throws SQLException, AuthorizeException,
|
||||
IOException
|
||||
|
@@ -7,9 +7,6 @@
|
||||
*/
|
||||
package org.dspace.content.authority;
|
||||
|
||||
import org.dspace.authority.AuthoritySearchService;
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.rest.RestSource;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
@@ -17,6 +14,9 @@ import org.apache.solr.client.solrj.response.QueryResponse;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrDocumentList;
|
||||
import org.apache.solr.common.params.CommonParams;
|
||||
import org.dspace.authority.AuthoritySearchService;
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.SolrAuthorityInterface;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
@@ -35,7 +35,7 @@ import java.util.Map;
|
||||
public class SolrAuthority implements ChoiceAuthority {
|
||||
|
||||
private static final Logger log = Logger.getLogger(SolrAuthority.class);
|
||||
private RestSource source = new DSpace().getServiceManager().getServiceByName("AuthoritySource", RestSource.class);
|
||||
protected SolrAuthorityInterface source = new DSpace().getServiceManager().getServiceByName("AuthoritySource", SolrAuthorityInterface.class);
|
||||
private boolean externalResults = false;
|
||||
|
||||
public Choices getMatches(String field, String text, int collection, int start, int limit, String locale, boolean bestMatch) {
|
||||
|
@@ -0,0 +1,44 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.content.crosswalk;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
/**
|
||||
* Created by jonas - jonas@atmire.com on 21/04/17.
|
||||
* Implementation of the {@link DisseminationCrosswalk} interface that enables the ability to set a Context manually
|
||||
*/
|
||||
public abstract class ContextAwareDisseminationCrosswalk implements DisseminationCrosswalk{
|
||||
|
||||
private Context context;
|
||||
private boolean contextCreatedInternally = false;
|
||||
|
||||
public void setContext(Context context){
|
||||
this.context = context;
|
||||
}
|
||||
public Context getContext() throws SQLException {
|
||||
if(context == null|| !context.isValid()){
|
||||
context=new Context();
|
||||
contextCreatedInternally = true;
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
public void handleContextCleanup() throws SQLException {
|
||||
if(contextCreatedInternally){
|
||||
context.complete();
|
||||
}else{
|
||||
context.commit();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
@@ -27,7 +27,10 @@ import org.dspace.license.CreativeCommons;
|
||||
*
|
||||
* @author Larry Stone
|
||||
* @version $Revision: 1.0 $
|
||||
*
|
||||
* @deprecated to make uniforme JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored see https://jira.duraspace.org/browse/DS-2604
|
||||
*/
|
||||
@Deprecated
|
||||
public class CreativeCommonsTextStreamDisseminationCrosswalk
|
||||
implements StreamDisseminationCrosswalk
|
||||
{
|
||||
|
@@ -7,15 +7,15 @@
|
||||
*/
|
||||
package org.dspace.content.crosswalk;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.jdom.Element;
|
||||
import org.jdom.Namespace;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Dissemination Crosswalk plugin -- translate DSpace native
|
||||
* metadata into an external XML format.
|
||||
@@ -107,6 +107,9 @@ public interface DisseminationCrosswalk
|
||||
* Execute crosswalk, returning one XML root element as
|
||||
* a JDOM <code>Element</code> object.
|
||||
* This is typically the root element of a document.
|
||||
* Note that, if the implementing class is of type "{@link org.dspace.content.crosswalk.ContextAwareDisseminationCrosswalk}"
|
||||
* and a context is present in the method call, you should set the context before calling this method. -> "{@link org.dspace.content.crosswalk.ContextAwareDisseminationCrosswalk#setContext(org.dspace.core.Context)}"
|
||||
* The implementing class should then use the "{@link ContextAwareDisseminationCrosswalk#getContext()}" and "{@link ContextAwareDisseminationCrosswalk#handleContextCleanup()}" to retrieve and commit/complete the context respectively
|
||||
* <p>
|
||||
*
|
||||
* @param dso the DSpace Object whose metadata to export.
|
||||
|
@@ -7,17 +7,6 @@
|
||||
*/
|
||||
package org.dspace.content.crosswalk;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.text.ParseException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Date;
|
||||
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.commons.lang.ArrayUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
@@ -33,6 +22,12 @@ import org.dspace.eperson.Group;
|
||||
import org.jdom.Element;
|
||||
import org.jdom.Namespace;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* METSRights Ingestion & Dissemination Crosswalk
|
||||
* <p>
|
||||
@@ -51,7 +46,7 @@ import org.jdom.Namespace;
|
||||
* @author Tim Donohue
|
||||
* @version $Revision: 2108 $
|
||||
*/
|
||||
public class METSRightsCrosswalk
|
||||
public class METSRightsCrosswalk extends ContextAwareDisseminationCrosswalk
|
||||
implements IngestionCrosswalk, DisseminationCrosswalk
|
||||
{
|
||||
/** log4j category */
|
||||
@@ -120,14 +115,14 @@ public class METSRightsCrosswalk
|
||||
* METSRights PermissionTypes.
|
||||
*
|
||||
* @param dso DSpace Object
|
||||
* @param context Context Object
|
||||
* @return XML Element corresponding to the new <RightsDeclarationMD> translation
|
||||
* @throws CrosswalkException
|
||||
* @throws IOException
|
||||
* @throws SQLException
|
||||
* @throws AuthorizeException
|
||||
*/
|
||||
@Override
|
||||
public Element disseminateElement(DSpaceObject dso)
|
||||
public Element disseminateElement(Context context,DSpaceObject dso)
|
||||
throws CrosswalkException,
|
||||
IOException, SQLException, AuthorizeException
|
||||
{
|
||||
@@ -156,7 +151,6 @@ public class METSRightsCrosswalk
|
||||
// what those rights are -- too many types of content can be stored in DSpace
|
||||
|
||||
//Get all policies on this DSpace Object
|
||||
Context context = new Context();
|
||||
List<ResourcePolicy> policies = AuthorizeManager.getPolicies(context, dso);
|
||||
|
||||
//For each DSpace policy
|
||||
@@ -282,9 +276,31 @@ public class METSRightsCrosswalk
|
||||
|
||||
}//end for each policy
|
||||
|
||||
context.complete();
|
||||
return rightsMD;
|
||||
}
|
||||
/**
|
||||
* Actually Disseminate into METSRights schema. This method locates all DSpace
|
||||
* policies (permissions) for the provided object, and translates them into
|
||||
* METSRights PermissionTypes.
|
||||
*
|
||||
* @param dso DSpace Object
|
||||
* @return XML Element corresponding to the new <RightsDeclarationMD> translation
|
||||
* @throws CrosswalkException
|
||||
* @throws IOException
|
||||
* @throws SQLException
|
||||
* @throws AuthorizeException
|
||||
* @deprecated Do not use this method, please opt for "{@link #disseminateElement(Context context, DSpaceObject dso)}" instead, as this does not internally need to create a new Context
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public Element disseminateElement(DSpaceObject dso)
|
||||
throws CrosswalkException,
|
||||
IOException, SQLException, AuthorizeException {
|
||||
Context context = getContext();
|
||||
Element element = disseminateElement(context, dso);
|
||||
handleContextCleanup();
|
||||
return element;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Element> disseminateList(DSpaceObject dso)
|
||||
@@ -435,134 +451,81 @@ public class METSRightsCrosswalk
|
||||
public void ingest(Context context, DSpaceObject dso, List<Element> ml)
|
||||
throws CrosswalkException, IOException, SQLException, AuthorizeException
|
||||
{
|
||||
// we cannot crosswalk METSRights to a SITE object
|
||||
// SITE objects are not supported by the METSRightsCrosswalk
|
||||
if (dso.getType() == Constants.SITE)
|
||||
{
|
||||
throw new CrosswalkObjectNotSupported("Wrong target object type, METSRightsCrosswalk cannot crosswalk a SITE object.");
|
||||
}
|
||||
|
||||
//First, clear all existing Policies on this DSpace Object
|
||||
// as we don't want them to conflict with policies we will be adding
|
||||
if(!ml.isEmpty())
|
||||
// If we're fed the top-level <RightsDeclarationMD> wrapper element, recurse into its guts.
|
||||
// What we need to analyze are the <Context> elements underneath it.
|
||||
if(!ml.isEmpty() && ml.get(0).getName().equals("RightsDeclarationMD"))
|
||||
{
|
||||
AuthorizeManager.removeAllPolicies(context, dso);
|
||||
ingest(context, dso, ml.get(0).getChildren());
|
||||
}
|
||||
|
||||
// Loop through each Element in the List
|
||||
List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
|
||||
for (Element element : ml)
|
||||
else
|
||||
{
|
||||
// if we're fed a <RightsDeclarationMD> wrapper object, recurse on its guts:
|
||||
if (element.getName().equals("RightsDeclarationMD"))
|
||||
// Loop through each <Context> Element in the passed in List, creating a ResourcePolicy for each
|
||||
List<ResourcePolicy> policies = new ArrayList<>();
|
||||
for (Element element : ml)
|
||||
{
|
||||
ingest(context, dso, element.getChildren());
|
||||
}
|
||||
// "Context" section (where permissions are stored)
|
||||
else if (element.getName().equals("Context"))
|
||||
{
|
||||
//get what class of context this is
|
||||
String contextClass = element.getAttributeValue("CONTEXTCLASS");
|
||||
|
||||
if ((element.getAttributeValue("start-date") != null)
|
||||
|| (element.getAttributeValue("end-date") != null)
|
||||
|| (element.getAttributeValue("rpName") != null))
|
||||
{
|
||||
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
|
||||
try {
|
||||
ResourcePolicy rp = ResourcePolicy.create(context);
|
||||
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("GENERAL PUBLIC")) {
|
||||
Group anonGroup = Group.find(context, 0);
|
||||
rp.setGroup(anonGroup);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("REPOSITORY MGR")) {
|
||||
Group adminGroup = Group.find(context, 1);
|
||||
rp.setGroup(adminGroup);
|
||||
}
|
||||
}
|
||||
if (element.getAttributeValue("rpName") != null)
|
||||
{
|
||||
rp.setRpName(element.getAttributeValue("rpName"));
|
||||
}
|
||||
try {
|
||||
if (element.getAttributeValue("start-date") != null)
|
||||
{
|
||||
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
|
||||
}
|
||||
if (element.getAttributeValue("end-date") != null)
|
||||
{
|
||||
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
|
||||
}
|
||||
}catch (ParseException ex) {
|
||||
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
|
||||
List<Element> le = new ArrayList<Element>(element.getChildren());
|
||||
for (Element el : le)
|
||||
{
|
||||
if ((el.getAttributeValue("DISCOVER").equalsIgnoreCase("true"))
|
||||
&& (el.getAttributeValue("DISPLAY").equalsIgnoreCase("true")))
|
||||
{
|
||||
if (el.getAttributeValue("DELETE").equalsIgnoreCase("false"))
|
||||
{
|
||||
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("false"))
|
||||
{
|
||||
rp.setAction(Constants.READ);
|
||||
}
|
||||
else
|
||||
{
|
||||
rp.setAction(Constants.WRITE);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("true"))
|
||||
{
|
||||
rp.setAction(Constants.DELETE);
|
||||
if ((el.getAttributeValue("COPY").equalsIgnoreCase("true"))
|
||||
&&(el.getAttributeValue("DUPLICATE").equalsIgnoreCase("true"))
|
||||
&&(el.getAttributeValue("PRINT").equalsIgnoreCase("true")))
|
||||
{
|
||||
rp.setAction(Constants.ADMIN);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
policies.add(rp);
|
||||
} catch (NullPointerException ex) {
|
||||
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
assignPermissions(context, dso, policies);
|
||||
}
|
||||
else
|
||||
// Must be a "Context" section (where permissions are stored)
|
||||
if (element.getName().equals("Context"))
|
||||
{
|
||||
//also get reference to the <Permissions> element
|
||||
//get what class of context this is
|
||||
String contextClass = element.getAttributeValue("CONTEXTCLASS");
|
||||
|
||||
ResourcePolicy rp = ResourcePolicy.create(context);
|
||||
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
|
||||
|
||||
// get reference to the <Permissions> element
|
||||
// Note: we are assuming here that there will only ever be ONE <Permissions>
|
||||
// element. Currently there are no known use cases for multiple.
|
||||
Element permsElement = element.getChild("Permissions", METSRights_NS);
|
||||
if(permsElement == null) {
|
||||
log.error("No <Permissions> element was found. Skipping this <Context> element.");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (element.getAttributeValue("rpName") != null)
|
||||
{
|
||||
rp.setRpName(element.getAttributeValue("rpName"));
|
||||
}
|
||||
try {
|
||||
if (element.getAttributeValue("start-date") != null)
|
||||
{
|
||||
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
|
||||
}
|
||||
if (element.getAttributeValue("end-date") != null)
|
||||
{
|
||||
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
|
||||
}
|
||||
}catch (ParseException ex) {
|
||||
log.error("Failed to parse embargo date. The date needs to be in the format 'yyyy-MM-dd'.", ex);
|
||||
}
|
||||
|
||||
//Check if this permission pertains to Anonymous users
|
||||
if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
//get DSpace Anonymous group, ID=0
|
||||
Group anonGroup = Group.find(context, 0);
|
||||
Group anonGroup = Group.find(context, Group.ANONYMOUS_ID);
|
||||
if(anonGroup==null)
|
||||
{
|
||||
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
|
||||
}
|
||||
|
||||
assignPermissions(context, dso, anonGroup, permsElement);
|
||||
rp.setGroup(anonGroup);
|
||||
} // else if this permission declaration pertains to Administrators
|
||||
else if(ADMIN_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
//get DSpace Administrator group, ID=1
|
||||
Group adminGroup = Group.find(context, 1);
|
||||
Group adminGroup = Group.find(context, Group.ADMIN_ID);
|
||||
if(adminGroup==null)
|
||||
{
|
||||
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
|
||||
}
|
||||
|
||||
assignPermissions(context, dso, adminGroup, permsElement);
|
||||
rp.setGroup(adminGroup);
|
||||
} // else if this permission pertains to another DSpace group
|
||||
else if(GROUP_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
@@ -591,8 +554,8 @@ public class METSRightsCrosswalk
|
||||
+ "Please restore this group using the SITE AIP, or recreate it.");
|
||||
}
|
||||
|
||||
//assign permissions to group on this object
|
||||
assignPermissions(context, dso, group, permsElement);
|
||||
//assign group to policy
|
||||
rp.setGroup(group);
|
||||
}
|
||||
catch(PackageException pe)
|
||||
{
|
||||
@@ -600,7 +563,7 @@ public class METSRightsCrosswalk
|
||||
//We'll just wrap it as a CrosswalkException and throw it upwards
|
||||
throw new CrosswalkException(pe);
|
||||
}
|
||||
}//end if Group
|
||||
}// else if this permission pertains to a DSpace person
|
||||
else if(PERSON_CONTEXTCLASS.equals(contextClass))
|
||||
{
|
||||
//we need to find the person it pertains to
|
||||
@@ -629,89 +592,26 @@ public class METSRightsCrosswalk
|
||||
+ "Please restore this Person object using the SITE AIP, or recreate it.");
|
||||
}
|
||||
|
||||
//assign permissions to person on this object
|
||||
assignPermissions(context, dso, person, permsElement);
|
||||
//assign person to the policy
|
||||
rp.setEPerson(person);
|
||||
}//end if Person
|
||||
else
|
||||
else {
|
||||
log.error("Unrecognized CONTEXTCLASS: " + contextClass);
|
||||
}
|
||||
} //end if "Context" element
|
||||
}//end while loop
|
||||
}
|
||||
}
|
||||
|
||||
//set permissions on policy add to list of policies
|
||||
rp.setAction(parsePermissions(permsElement));
|
||||
policies.add(rp);
|
||||
} //end if "Context" element
|
||||
}//end for loop
|
||||
|
||||
/**
|
||||
* Parses the 'permsElement' (corresponding to a <code>Permissions</code>
|
||||
* element), and assigns those permissions to the specified Group
|
||||
* on the specified DSpace Object.
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param dso The DSpace Object
|
||||
* @param group The DSpace Group
|
||||
* @param permsElement The METSRights <code>Permissions</code> element
|
||||
*/
|
||||
private void assignPermissions(Context context, DSpaceObject dso, List<ResourcePolicy> policies)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
AuthorizeManager.removeAllPolicies(context, dso);
|
||||
if (policies == null){
|
||||
throw new AuthorizeException("Policies are null");
|
||||
}
|
||||
else{
|
||||
// Finally, we need to remove any existing policies from the current object,
|
||||
// and replace them with the policies provided via METSRights. NOTE:
|
||||
// if the list of policies provided by METSRights is an empty list, then
|
||||
// the final object will have no policies attached.
|
||||
AuthorizeManager.removeAllPolicies(context, dso);
|
||||
AuthorizeManager.addPolicies(context, policies, dso);
|
||||
}
|
||||
}
|
||||
|
||||
private void assignPermissions(Context context, DSpaceObject dso, Group group, Element permsElement)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
//first, parse our permissions to determine which action we are allowing in DSpace
|
||||
int actionID = parsePermissions(permsElement);
|
||||
|
||||
//If action ID is less than base READ permissions (value=0),
|
||||
// then something must've gone wrong in the parsing
|
||||
if(actionID < Constants.READ)
|
||||
{
|
||||
log.warn("Unable to properly restore all access permissions on object ("
|
||||
+ "type=" + Constants.typeText[dso.getType()] + ", "
|
||||
+ "handle=" + dso.getHandle() + ", "
|
||||
+ "ID=" + dso.getID()
|
||||
+ ") for group '" + group.getName() + "'.");
|
||||
}
|
||||
|
||||
//Otherwise, add the appropriate group policy for this object
|
||||
AuthorizeManager.addPolicy(context, dso, actionID, group);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the 'permsElement' (corresponding to a <code>Permissions</code>
|
||||
* element), and assigns those permissions to the specified EPerson
|
||||
* on the specified DSpace Object.
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param dso The DSpace Object
|
||||
* @param person The DSpace EPerson
|
||||
* @param permsElement The METSRights <code>Permissions</code> element
|
||||
*/
|
||||
private void assignPermissions(Context context, DSpaceObject dso, EPerson person, Element permsElement)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
//first, parse our permissions to determine which action we are allowing in DSpace
|
||||
int actionID = parsePermissions(permsElement);
|
||||
|
||||
//If action ID is less than base READ permissions (value=0),
|
||||
// then something must've gone wrong in the parsing
|
||||
if(actionID < Constants.READ)
|
||||
{
|
||||
log.warn("Unable to properly restore all access permissions on object ("
|
||||
+ "type=" + Constants.typeText[dso.getType()] + ", "
|
||||
+ "handle=" + dso.getHandle() + ", "
|
||||
+ "ID=" + dso.getID()
|
||||
+ ") for person '" + person.getEmail() + "'.");
|
||||
}
|
||||
|
||||
//Otherwise, add the appropriate EPerson policy for this object
|
||||
AuthorizeManager.addPolicy(context, dso, actionID, person);
|
||||
} // end else
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -784,4 +684,5 @@ private void assignPermissions(Context context, DSpaceObject dso, List<ResourceP
|
||||
// return -1 to signify failure (as 0 = READ permissions)
|
||||
return -1;
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -21,15 +21,7 @@ import java.util.zip.ZipFile;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.FormatIdentifier;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.content.*;
|
||||
import org.dspace.content.crosswalk.CrosswalkException;
|
||||
import org.dspace.content.crosswalk.MetadataValidationException;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
@@ -37,6 +29,8 @@ import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.handle.HandleManager;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
|
||||
import org.jdom.Element;
|
||||
|
||||
/**
|
||||
@@ -324,18 +318,18 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
|
||||
}
|
||||
else
|
||||
{
|
||||
ZipFile zip = new ZipFile(pkgFile);
|
||||
try(ZipFile zip = new ZipFile(pkgFile))
|
||||
{
|
||||
// Retrieve the manifest file entry (named mets.xml)
|
||||
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
|
||||
|
||||
// Retrieve the manifest file entry (named mets.xml)
|
||||
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
|
||||
|
||||
// parse the manifest and sanity-check it.
|
||||
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
|
||||
validate, getConfigurationName());
|
||||
|
||||
// close the Zip file for now
|
||||
// (we'll extract the other files from zip when we need them)
|
||||
zip.close();
|
||||
if(manifestEntry!=null)
|
||||
{
|
||||
// parse the manifest and sanity-check it.
|
||||
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
|
||||
validate, getConfigurationName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// return our parsed out METS manifest
|
||||
@@ -660,8 +654,24 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
|
||||
addBitstreams(context, item, manifest, pkgFile, params, callback);
|
||||
|
||||
// have subclass manage license since it may be extra package file.
|
||||
addLicense(context, item, license, (Collection) dso
|
||||
.getParentObject(), params);
|
||||
Collection owningCollection = (Collection) dso.getParentObject();
|
||||
if(owningCollection == null)
|
||||
{
|
||||
//We are probably dealing with an item that isn't archived yet
|
||||
InProgressSubmission inProgressSubmission = WorkspaceItem.findByItem(context, item);
|
||||
if(inProgressSubmission == null)
|
||||
{
|
||||
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow"))
|
||||
{
|
||||
inProgressSubmission = XmlWorkflowItem.findByItem(context, item);
|
||||
}else{
|
||||
inProgressSubmission = WorkflowItem.findByItem(context, item);
|
||||
}
|
||||
}
|
||||
owningCollection = inProgressSubmission.getCollection();
|
||||
}
|
||||
|
||||
addLicense(context, item, license, owningCollection, params);
|
||||
|
||||
// FIXME ?
|
||||
// should set lastModifiedTime e.g. when ingesting AIP.
|
||||
|
@@ -127,6 +127,8 @@ public class Constants
|
||||
*/
|
||||
public static final int ADMIN = 11;
|
||||
|
||||
public static final int WITHDRAWN_READ = 12;
|
||||
|
||||
/** Position of front page news item -- top box */
|
||||
public static final int NEWS_TOP = 0;
|
||||
|
||||
@@ -139,7 +141,7 @@ public class Constants
|
||||
public static final String[] actionText = { "READ", "WRITE",
|
||||
"OBSOLETE (DELETE)", "ADD", "REMOVE", "WORKFLOW_STEP_1",
|
||||
"WORKFLOW_STEP_2", "WORKFLOW_STEP_3", "WORKFLOW_ABORT",
|
||||
"DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN" };
|
||||
"DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN", "WITHDRAWN_READ" };
|
||||
|
||||
/**
|
||||
* generating constants for the relevance array dynamically is simple: just
|
||||
@@ -175,7 +177,9 @@ public class Constants
|
||||
0, // 8 - WORKFLOW_ABORT
|
||||
RCOLLECTION, // 9 - DEFAULT_BITSTREAM_READ
|
||||
RCOLLECTION, // 10 - DEFAULT_ITEM_READ
|
||||
RITEM | RCOLLECTION | RCOMMUNITY // 11 - ADMIN
|
||||
RITEM | RCOLLECTION | RCOMMUNITY, // 11 - ADMIN
|
||||
RBITSTREAM | RBUNDLE | RITEM // 12 - WITHDRAWN_READ
|
||||
|
||||
};
|
||||
|
||||
public static final String DEFAULT_ENCODING = "UTF-8";
|
||||
|
@@ -474,8 +474,18 @@ public class Email
|
||||
System.out.println(" - To: " + to);
|
||||
System.out.println(" - Subject: " + subject);
|
||||
System.out.println(" - Server: " + server);
|
||||
boolean disabled = ConfigurationManager.getBooleanProperty("mail.server.disabled", false);
|
||||
try
|
||||
{
|
||||
if( disabled)
|
||||
{
|
||||
System.err.println("\nError sending email:");
|
||||
System.err.println(" - Error: cannot test email because mail.server.disabled is set to true");
|
||||
System.err.println("\nPlease see the DSpace documentation for assistance.\n");
|
||||
System.err.println("\n");
|
||||
System.exit(1);
|
||||
return;
|
||||
}
|
||||
e.send();
|
||||
}
|
||||
catch (MessagingException me)
|
||||
|
@@ -16,6 +16,9 @@ import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.PrintWriter;
|
||||
|
||||
import org.dspace.core.service.NewsService;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -39,6 +42,10 @@ public class NewsManager
|
||||
*/
|
||||
public static String readNewsFile(String newsFile)
|
||||
{
|
||||
NewsService newsService = new DSpace().getSingletonService(NewsService.class);
|
||||
if (!newsService.validate(newsFile)) {
|
||||
throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
|
||||
}
|
||||
String fileName = getNewsFilePath();
|
||||
|
||||
fileName += newsFile;
|
||||
@@ -81,6 +88,10 @@ public class NewsManager
|
||||
*/
|
||||
public static String writeNewsFile(String newsFile, String news)
|
||||
{
|
||||
NewsService newsService = new DSpace().getSingletonService(NewsService.class);
|
||||
if (!newsService.validate(newsFile)) {
|
||||
throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
|
||||
}
|
||||
String fileName = getNewsFilePath();
|
||||
|
||||
fileName += newsFile;
|
||||
|
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.core.service.NewsService;
|
||||
|
||||
public class NewsServiceImpl implements NewsService {
|
||||
private List<String> acceptableFilenames;
|
||||
|
||||
public void setAcceptableFilenames(List<String> acceptableFilenames) {
|
||||
this.acceptableFilenames = acceptableFilenames;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean validate(String newsName) {
|
||||
if (acceptableFilenames != null) {
|
||||
return acceptableFilenames.contains(newsName);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,12 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.core.service;
|
||||
|
||||
public interface NewsService {
|
||||
boolean validate(String newsName);
|
||||
}
|
@@ -164,7 +164,7 @@ public class CurationCli
|
||||
}
|
||||
else
|
||||
{
|
||||
c.setIgnoreAuthorization(true);
|
||||
c.turnOffAuthorisationSystem();
|
||||
}
|
||||
|
||||
Curator curator = new Curator();
|
||||
|
@@ -29,36 +29,50 @@ import org.dspace.core.PluginManager;
|
||||
/**
|
||||
* TaskResolver takes a logical name of a curation task and attempts to deliver
|
||||
* a suitable implementation object. Supported implementation types include:
|
||||
* (1) Classpath-local Java classes configured and loaded via PluginManager.
|
||||
* (2) Local script-based tasks, viz. coded in any scripting language whose
|
||||
* <ol>
|
||||
* <li> Classpath-local Java classes configured and loaded via PluginManager</li>.
|
||||
* <li> Local script-based tasks, viz. coded in any scripting language whose
|
||||
* runtimes are accessible via the JSR-223 scripting API. This really amounts
|
||||
* to the family of dynamic JVM languages: JRuby, Jython, Groovy, Javascript, etc
|
||||
* Note that the requisite jars and other resources for these languages must be
|
||||
* installed in the DSpace instance for them to be used here.
|
||||
* to the family of dynamic JVM languages: JRuby, Jython, Groovy, Javascript, etc.</li>
|
||||
* </ol>
|
||||
* <p>
|
||||
* Note that the requisite jars and other resources for these languages must
|
||||
* be installed in the DSpace instance for them to be used here.
|
||||
* Further work may involve remote URL-loadable code, etc.
|
||||
*
|
||||
* <p>
|
||||
* Scripted tasks are managed in a directory configured with the
|
||||
* dspace/config/modules/curate.cfg property "script.dir". A catalog of
|
||||
* scripted tasks named 'task.catalog" is kept in this directory.
|
||||
* {@code dspace/config/modules/curate.cfg} property "script.dir".
|
||||
* A catalog of
|
||||
* scripted tasks named "task.catalog" is kept in this directory.
|
||||
* Each task has a 'descriptor' property with value syntax:
|
||||
* <engine>|<relFilePath>|<implClassCtor>
|
||||
* <br/>
|
||||
* {@code <engine>|<relFilePath>|<implClassCtor>}
|
||||
*
|
||||
* <p>
|
||||
* An example property:
|
||||
*
|
||||
* linkchecker = ruby|rubytask.rb|LinkChecker.new
|
||||
*
|
||||
* <br/>
|
||||
* {@code linkchecker = ruby|rubytask.rb|LinkChecker.new}
|
||||
*
|
||||
* <p>
|
||||
* This descriptor means that a 'ruby' script engine will be created,
|
||||
* a script file named 'rubytask.rb' in the directory <script.dir> will be
|
||||
* loaded and the resolver will expect an evaluation of 'LinkChecker.new' will
|
||||
* provide a correct implementation object.
|
||||
*
|
||||
* a script file named 'rubytask.rb' in the directory {@code <script.dir>}
|
||||
* will be
|
||||
* loaded and the resolver will expect an evaluation of 'LinkChecker.new'
|
||||
* will provide a correct implementation object.
|
||||
*
|
||||
* <p>
|
||||
* Script files may embed their descriptors to facilitate deployment.
|
||||
* To accomplish this, a script must include the descriptor string with syntax:
|
||||
* $td=<descriptor> somewhere on a comment line. for example:
|
||||
*
|
||||
* # My descriptor $td=ruby|rubytask.rb|LinkChecker.new
|
||||
*
|
||||
* For portability, the <relFilePath> component may be omitted in this context.
|
||||
* Thus, $td=ruby||LinkChecker.new will be expanded to a descriptor
|
||||
* To accomplish this, a script must include the descriptor string with
|
||||
* syntax {@code $td=<descriptor>} somewhere on a comment line. For example:
|
||||
*
|
||||
* <p>
|
||||
* {@code # My descriptor $td=ruby|rubytask.rb|LinkChecker.new}
|
||||
*
|
||||
* <p>
|
||||
* For portability, the {@code <relFilePath>} component may be omitted in
|
||||
* this context.
|
||||
* Thus, {@code $td=ruby||LinkChecker.new} will be expanded to a descriptor
|
||||
* with the name of the embedding file.
|
||||
*
|
||||
* @author richardrodgers
|
||||
|
@@ -218,10 +218,20 @@ public class DiscoverQuery {
|
||||
this.facetOffset = facetOffset;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fields which you want Discovery to return in the search results.
|
||||
* It is HIGHLY recommended to limit the fields returned, as by default
|
||||
* some backends (like Solr) will return everything.
|
||||
* @param field field to add to the list of fields returned
|
||||
*/
|
||||
public void addSearchField(String field){
|
||||
this.searchFields.add(field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get list of fields which Discovery will return in the search results
|
||||
* @return List of field names
|
||||
*/
|
||||
public List<String> getSearchFields() {
|
||||
return searchFields;
|
||||
}
|
||||
|
@@ -39,7 +39,7 @@ public class IndexClient {
|
||||
public static void main(String[] args) throws SQLException, IOException, SearchServiceException {
|
||||
|
||||
Context context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
String usage = "org.dspace.discovery.IndexClient [-cbhf[r <item handle>]] or nothing to update/clean an existing index.";
|
||||
Options options = new Options();
|
||||
|
@@ -113,4 +113,11 @@ public interface SearchService {
|
||||
* @return the indexed field
|
||||
*/
|
||||
String toSortFieldIndex(String metadataField, String type);
|
||||
|
||||
/**
|
||||
* Utility method to escape any special characters in a user's query
|
||||
* @param query
|
||||
* @return query with any special characters escaped
|
||||
*/
|
||||
String escapeQueryChars(String query);
|
||||
}
|
||||
|
@@ -119,6 +119,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
private static final Logger log = Logger.getLogger(SolrServiceImpl.class);
|
||||
|
||||
protected static final String LAST_INDEXED_FIELD = "SolrIndexer.lastIndexed";
|
||||
protected static final String HANDLE_FIELD = "handle";
|
||||
protected static final String RESOURCE_TYPE_FIELD = "search.resourcetype";
|
||||
protected static final String RESOURCE_ID_FIELD = "search.resourceid";
|
||||
|
||||
public static final String FILTER_SEPARATOR = "\n|||\n";
|
||||
|
||||
@@ -149,9 +152,11 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
solr.setBaseURL(solrService);
|
||||
solr.setUseMultiPartPost(true);
|
||||
// Dummy/test query to search for Item (type=2) of ID=1
|
||||
SolrQuery solrQuery = new SolrQuery()
|
||||
.setQuery("search.resourcetype:2 AND search.resourceid:1");
|
||||
|
||||
.setQuery(RESOURCE_TYPE_FIELD + ":2 AND " + RESOURCE_ID_FIELD + ":1");
|
||||
// Only return obj identifier fields in result doc
|
||||
solrQuery.setFields(RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
|
||||
solr.query(solrQuery);
|
||||
|
||||
// As long as Solr initialized, check with DatabaseUtils to see
|
||||
@@ -323,7 +328,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
try {
|
||||
if(getSolr() != null){
|
||||
getSolr().deleteByQuery("handle:\"" + handle + "\"");
|
||||
getSolr().deleteByQuery(HANDLE_FIELD + ":\"" + handle + "\"");
|
||||
if(commit)
|
||||
{
|
||||
getSolr().commit();
|
||||
@@ -462,10 +467,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
if (force)
|
||||
{
|
||||
getSolr().deleteByQuery("search.resourcetype:[2 TO 4]");
|
||||
getSolr().deleteByQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
|
||||
} else {
|
||||
SolrQuery query = new SolrQuery();
|
||||
query.setQuery("search.resourcetype:[2 TO 4]");
|
||||
// Query for all indexed Items, Collections and Communities,
|
||||
// returning just their handle
|
||||
query.setFields(HANDLE_FIELD);
|
||||
query.setQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
|
||||
QueryResponse rsp = getSolr().query(query);
|
||||
SolrDocumentList docs = rsp.getResults();
|
||||
|
||||
@@ -475,7 +483,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
SolrDocument doc = (SolrDocument) iter.next();
|
||||
|
||||
String handle = (String) doc.getFieldValue("handle");
|
||||
String handle = (String) doc.getFieldValue(HANDLE_FIELD);
|
||||
|
||||
DSpaceObject o = HandleManager.resolveToObject(context, handle);
|
||||
|
||||
@@ -616,7 +624,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
boolean inIndex = false;
|
||||
|
||||
SolrQuery query = new SolrQuery();
|
||||
query.setQuery("handle:" + handle);
|
||||
query.setQuery(HANDLE_FIELD + ":" + handle);
|
||||
// Specify that we ONLY want the LAST_INDEXED_FIELD returned in the field list (fl)
|
||||
query.setFields(LAST_INDEXED_FIELD);
|
||||
QueryResponse rsp;
|
||||
|
||||
try {
|
||||
@@ -1444,9 +1454,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
// New fields to weaken the dependence on handles, and allow for faster
|
||||
// list display
|
||||
doc.addField("search.uniqueid", type+"-"+id);
|
||||
doc.addField("search.resourcetype", Integer.toString(type));
|
||||
doc.addField(RESOURCE_TYPE_FIELD, Integer.toString(type));
|
||||
|
||||
doc.addField("search.resourceid", Integer.toString(id));
|
||||
doc.addField(RESOURCE_ID_FIELD, Integer.toString(id));
|
||||
|
||||
// want to be able to search for handle, so use keyword
|
||||
// (not tokenized, but it is indexed)
|
||||
@@ -1454,7 +1464,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
{
|
||||
// want to be able to search for handle, so use keyword
|
||||
// (not tokenized, but it is indexed)
|
||||
doc.addField("handle", handle);
|
||||
doc.addField(HANDLE_FIELD, handle);
|
||||
}
|
||||
|
||||
if (locations != null)
|
||||
@@ -1584,7 +1594,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
discoveryQuery.addFilterQueries("location:l" + dso.getID());
|
||||
} else if (dso instanceof Item)
|
||||
{
|
||||
discoveryQuery.addFilterQueries("handle:" + dso.getHandle());
|
||||
discoveryQuery.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
|
||||
}
|
||||
}
|
||||
return search(context, discoveryQuery, includeUnDiscoverable);
|
||||
@@ -1620,6 +1630,18 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
|
||||
solrQuery.setQuery(query);
|
||||
|
||||
// Add any search fields to our query. This is the limited list
|
||||
// of fields that will be returned in the solr result
|
||||
for(String fieldName : discoveryQuery.getSearchFields())
|
||||
{
|
||||
solrQuery.addField(fieldName);
|
||||
}
|
||||
// Also ensure a few key obj identifier fields are returned with every query
|
||||
solrQuery.addField(HANDLE_FIELD);
|
||||
solrQuery.addField(RESOURCE_TYPE_FIELD);
|
||||
solrQuery.addField(RESOURCE_ID_FIELD);
|
||||
|
||||
if(discoveryQuery.isSpellCheck())
|
||||
{
|
||||
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
|
||||
@@ -1640,7 +1662,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
if(discoveryQuery.getDSpaceObjectFilter() != -1)
|
||||
{
|
||||
solrQuery.addFilterQuery("search.resourcetype:" + discoveryQuery.getDSpaceObjectFilter());
|
||||
solrQuery.addFilterQuery(RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter());
|
||||
}
|
||||
|
||||
for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++)
|
||||
@@ -1753,7 +1775,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
query.addFilterQueries("location:l" + dso.getID());
|
||||
} else if (dso instanceof Item)
|
||||
{
|
||||
query.addFilterQueries("handle:" + dso.getHandle());
|
||||
query.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
|
||||
}
|
||||
}
|
||||
return searchJSON(context, query, jsonIdentifier);
|
||||
@@ -1807,7 +1829,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
{
|
||||
result.addDSpaceObject(dso);
|
||||
} else {
|
||||
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue("handle")));
|
||||
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue(HANDLE_FIELD)));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -1926,9 +1948,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
protected static DSpaceObject findDSpaceObject(Context context, SolrDocument doc) throws SQLException {
|
||||
|
||||
Integer type = (Integer) doc.getFirstValue("search.resourcetype");
|
||||
Integer id = (Integer) doc.getFirstValue("search.resourceid");
|
||||
String handle = (String) doc.getFirstValue("handle");
|
||||
Integer type = (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD);
|
||||
Integer id = (Integer) doc.getFirstValue(RESOURCE_ID_FIELD);
|
||||
String handle = (String) doc.getFirstValue(HANDLE_FIELD);
|
||||
|
||||
if (type != null && id != null)
|
||||
{
|
||||
@@ -1981,7 +2003,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setQuery(query);
|
||||
solrQuery.setFields("search.resourceid", "search.resourcetype");
|
||||
//Only return obj identifier fields in result doc
|
||||
solrQuery.setFields(RESOURCE_ID_FIELD, RESOURCE_TYPE_FIELD);
|
||||
solrQuery.setStart(offset);
|
||||
solrQuery.setRows(max);
|
||||
if (orderfield != null)
|
||||
@@ -2001,7 +2024,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
{
|
||||
SolrDocument doc = (SolrDocument) iter.next();
|
||||
|
||||
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue("search.resourcetype"), (Integer) doc.getFirstValue("search.resourceid"));
|
||||
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD), (Integer) doc.getFirstValue(RESOURCE_ID_FIELD));
|
||||
|
||||
if (o != null)
|
||||
{
|
||||
@@ -2089,7 +2112,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
try{
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
//Set the query to handle since this is unique
|
||||
solrQuery.setQuery("handle: " + item.getHandle());
|
||||
solrQuery.setQuery(HANDLE_FIELD + ": " + item.getHandle());
|
||||
//Only return obj identifier fields in result doc
|
||||
solrQuery.setFields(HANDLE_FIELD, RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
|
||||
//Add the more like this parameters !
|
||||
solrQuery.setParam(MoreLikeThisParams.MLT, true);
|
||||
//Add a comma separated list of the similar fields
|
||||
@@ -2320,4 +2345,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
throw new SearchServiceException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String escapeQueryChars(String query) {
|
||||
// Use Solr's built in query escape tool
|
||||
// WARNING: You should only escape characters from user entered queries,
|
||||
// otherwise you may accidentally BREAK field-based queries (which often
|
||||
// rely on special characters to separate the field from the query value)
|
||||
return ClientUtils.escapeQueryChars(query);
|
||||
}
|
||||
}
|
||||
|
@@ -91,10 +91,9 @@ public class EmbargoManager
|
||||
}
|
||||
}
|
||||
String slift = myLift.toString();
|
||||
boolean ignoreAuth = context.ignoreAuthorization();
|
||||
try
|
||||
{
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
item.clearMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);
|
||||
item.addMetadata(lift_schema, lift_element, lift_qualifier, null, slift);
|
||||
log.info("Set embargo on Item "+item.getHandle()+", expires on: "+slift);
|
||||
@@ -105,7 +104,7 @@ public class EmbargoManager
|
||||
}
|
||||
finally
|
||||
{
|
||||
context.setIgnoreAuthorization(ignoreAuth);
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -267,7 +266,7 @@ public class EmbargoManager
|
||||
try
|
||||
{
|
||||
context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
Date now = new Date();
|
||||
|
||||
// scan items under embargo
|
||||
|
@@ -499,7 +499,7 @@ public class EPerson extends DSpaceObject
|
||||
break;
|
||||
|
||||
case LANGUAGE:
|
||||
s = "m_text_value";
|
||||
s = "m.text_value";
|
||||
t = "language";
|
||||
break;
|
||||
case NETID:
|
||||
@@ -507,23 +507,26 @@ public class EPerson extends DSpaceObject
|
||||
break;
|
||||
|
||||
default:
|
||||
s = "m_text_value";
|
||||
s = "m.text_value";
|
||||
t = "lastname";
|
||||
}
|
||||
|
||||
// NOTE: The use of 's' in the order by clause can not cause an SQL
|
||||
// injection because the string is derived from constant values above.
|
||||
TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY ?",s);
|
||||
TableRowIterator rows;
|
||||
if(!t.equals("")) {
|
||||
rows = DatabaseManager.query(context,
|
||||
"SELECT * FROM eperson e " +
|
||||
"LEFT JOIN metadatavalue m on (m.resource_id = e.eperson_id and m.resource_type_id = ? and m.metadata_field_id = ?) " +
|
||||
"ORDER BY ?",
|
||||
"LEFT JOIN metadatavalue m on (m.resource_id = e.eperson_id and m.resource_type_id = ? and m.metadata_field_id = ?) " +
|
||||
"ORDER BY " + s,
|
||||
Constants.EPERSON,
|
||||
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID(),
|
||||
s
|
||||
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID()
|
||||
);
|
||||
}
|
||||
else {
|
||||
rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY " + s);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
@@ -358,9 +358,9 @@ public class Group extends DSpaceObject
|
||||
}
|
||||
|
||||
/**
|
||||
* fast check to see if an eperson is a member called with eperson id, does
|
||||
* database lookup without instantiating all of the epeople objects and is
|
||||
* thus a static method
|
||||
* fast check to see if the current EPerson is a member of a Group. Does
|
||||
* database lookup without instantiating all of the EPerson objects and is
|
||||
* thus a static method.
|
||||
*
|
||||
* @param c
|
||||
* context
|
||||
@@ -380,6 +380,29 @@ public class Group extends DSpaceObject
|
||||
return epersonInGroup(c, groupid, currentuser);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fast check to see if a given EPerson is a member of a Group.
|
||||
* Does database lookup without instantiating all of the EPerson objects and
|
||||
* is thus a static method.
|
||||
*
|
||||
* @param c current DSpace context.
|
||||
* @param eperson candidate to test for membership.
|
||||
* @param groupid group whose membership is to be tested.
|
||||
* @return true if {@link eperson} is a member of Group {@link groupid}.
|
||||
* @throws SQLException passed through
|
||||
*/
|
||||
public static boolean isMember(Context c, EPerson eperson, int groupid)
|
||||
throws SQLException
|
||||
{
|
||||
// Every EPerson is a member of Anonymous
|
||||
if (groupid == 0)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
return epersonInGroup(c, groupid, eperson);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all of the groups that an eperson is a member of.
|
||||
*
|
||||
|
@@ -284,14 +284,23 @@ public class EventManager
|
||||
{
|
||||
Context ctx = new Context();
|
||||
|
||||
for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
|
||||
.iterator(); ci.hasNext();)
|
||||
{
|
||||
ConsumerProfile cp = (ConsumerProfile) ci.next();
|
||||
if (cp != null)
|
||||
try {
|
||||
|
||||
for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
|
||||
.iterator(); ci.hasNext();)
|
||||
{
|
||||
cp.getConsumer().finish(ctx);
|
||||
ConsumerProfile cp = (ConsumerProfile) ci.next();
|
||||
if (cp != null)
|
||||
{
|
||||
cp.getConsumer().finish(ctx);
|
||||
}
|
||||
}
|
||||
|
||||
ctx.complete();
|
||||
|
||||
} catch (Exception e) {
|
||||
ctx.abort();
|
||||
throw e;
|
||||
}
|
||||
return;
|
||||
|
||||
|
@@ -9,89 +9,167 @@ package org.dspace.handle;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.InputStreamReader;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.storage.rdbms.DatabaseManager;
|
||||
import org.dspace.storage.rdbms.TableRow;
|
||||
import org.dspace.search.DSIndexer;
|
||||
import org.dspace.browse.IndexBrowse;
|
||||
import org.dspace.discovery.IndexClient;
|
||||
|
||||
/**
|
||||
* A script to update the handle values in the database. This is typically used
|
||||
* when moving from a test machine (handle = 123456789) to a production service.
|
||||
* when moving from a test machine (handle = 123456789) to a production service
|
||||
* or when make a test clone from production service.
|
||||
*
|
||||
* @author Stuart Lewis
|
||||
* @author Ivo Prajer (Czech Technical University in Prague)
|
||||
*/
|
||||
public class UpdateHandlePrefix
|
||||
{
|
||||
|
||||
private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);
|
||||
|
||||
/**
|
||||
* When invoked as a command-line tool, updates handle prefix
|
||||
*
|
||||
* @param args the command-line arguments, none used
|
||||
* @throws java.lang.Exception
|
||||
*
|
||||
*/
|
||||
public static void main(String[] args) throws Exception
|
||||
{
|
||||
// There should be two paramters
|
||||
// There should be two parameters
|
||||
if (args.length < 2)
|
||||
{
|
||||
System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n");
|
||||
System.exit(1);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Confirm with the user that this is what they want to do
|
||||
String oldH = args[0];
|
||||
String newH = args[1];
|
||||
|
||||
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
|
||||
// Get info about changes
|
||||
System.out.println("\nGetting information about handles from database...");
|
||||
Context context = new Context();
|
||||
System.out.println("If you continue, all handles in your repository with prefix " +
|
||||
oldH + " will be updated to have handle prefix " + newH + "\n");
|
||||
String sql = "SELECT count(*) as count FROM handle " +
|
||||
String sql = "SELECT count(*) as count " +
|
||||
"FROM handle " +
|
||||
"WHERE handle LIKE '" + oldH + "%'";
|
||||
TableRow row = DatabaseManager.querySingle(context, sql, new Object[] {});
|
||||
long count = row.getLongColumn("count");
|
||||
System.out.println(count + " items will be updated.\n");
|
||||
System.out.print("Have you taken a backup, and are you ready to continue? [y/n]: ");
|
||||
String choiceString = input.readLine();
|
||||
|
||||
if (choiceString.equalsIgnoreCase("y"))
|
||||
if (count > 0)
|
||||
{
|
||||
// Make the changes
|
||||
System.out.print("Updating handle table... ");
|
||||
sql = "update handle set handle = '" + newH + "' || '/' || handle_id " +
|
||||
"where handle like '" + oldH + "/%'";
|
||||
int updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(updated + " items updated");
|
||||
// Print info text about changes
|
||||
System.out.println(
|
||||
"In your repository will be updated " + count + " handle" +
|
||||
((count > 1) ? "s" : "") + " to new prefix " + newH +
|
||||
" from original " + oldH + "!\n"
|
||||
);
|
||||
|
||||
System.out.print("Updating metadatavalues table... ");
|
||||
sql = "UPDATE metadatavalue SET text_value= (SELECT 'http://hdl.handle.net/' || " +
|
||||
"handle FROM handle WHERE handle.resource_id=item_id AND " +
|
||||
"handle.resource_type_id=2) WHERE text_value LIKE 'http://hdl.handle.net/%';";
|
||||
updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(updated + " metadata values updated");
|
||||
// Confirm with the user that this is what they want to do
|
||||
System.out.print(
|
||||
"Servlet container (e.g. Apache Tomcat, Jetty, Caucho Resin) must be running.\n" +
|
||||
"If it is necessary, please make a backup of the database.\n" +
|
||||
"Are you ready to continue? [y/n]: "
|
||||
);
|
||||
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
|
||||
String choiceString = input.readLine();
|
||||
|
||||
// Commit the changes
|
||||
context.complete();
|
||||
|
||||
System.out.print("Re-creating browse and search indexes... ");
|
||||
|
||||
// Reinitialise the browse system
|
||||
IndexBrowse.main(new String[] {"-i"});
|
||||
|
||||
// Reinitialise the browse system
|
||||
try
|
||||
if (choiceString.equalsIgnoreCase("y"))
|
||||
{
|
||||
DSIndexer.main(new String[0]);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// Not a lot we can do
|
||||
System.out.println("Error re-indexing:");
|
||||
e.printStackTrace();
|
||||
System.out.println("\nPlease manually run [dspace]/bin/index-all");
|
||||
}
|
||||
try {
|
||||
log.info("Updating handle prefix from " + oldH + " to " + newH);
|
||||
|
||||
// All done
|
||||
System.out.println("\nHandles successfully updated.");
|
||||
// Make the changes
|
||||
System.out.print("\nUpdating handle table... ");
|
||||
sql = "UPDATE handle " +
|
||||
"SET handle = '" + newH + "' || '/' || handle_id " +
|
||||
"WHERE handle like '" + oldH + "/%'";
|
||||
int updHdl = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(
|
||||
updHdl + " item" + ((updHdl > 1) ? "s" : "") + " updated"
|
||||
);
|
||||
|
||||
System.out.print("Updating metadatavalues table... ");
|
||||
sql = "UPDATE metadatavalue " +
|
||||
"SET text_value = " +
|
||||
"(" +
|
||||
"SELECT 'http://hdl.handle.net/' || handle " +
|
||||
"FROM handle " +
|
||||
"WHERE handle.resource_id = metadatavalue.resource_id " +
|
||||
"AND handle.resource_type_id = 2" +
|
||||
") " +
|
||||
"WHERE text_value LIKE 'http://hdl.handle.net/" + oldH + "/%'" +
|
||||
"AND EXISTS " +
|
||||
"(" +
|
||||
"SELECT 1 " +
|
||||
"FROM handle " +
|
||||
"WHERE handle.resource_id = metadatavalue.resource_id " +
|
||||
"AND handle.resource_type_id = 2" +
|
||||
")";
|
||||
int updMeta = DatabaseManager.updateQuery(context, sql, new Object[] {});
|
||||
System.out.println(
|
||||
updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
|
||||
);
|
||||
|
||||
// Commit the changes
|
||||
context.complete();
|
||||
|
||||
log.info(
|
||||
"Done with updating handle prefix. " +
|
||||
"It was changed " + updHdl + " handle" + ((updHdl > 1) ? "s" : "") +
|
||||
" and " + updMeta + " metadata record" + ((updMeta > 1) ? "s" : "")
|
||||
);
|
||||
|
||||
}
|
||||
catch (SQLException sqle)
|
||||
{
|
||||
if ((context != null) && (context.isValid()))
|
||||
{
|
||||
context.abort();
|
||||
context = null;
|
||||
}
|
||||
System.out.println("\nError during SQL operations.");
|
||||
throw sqle;
|
||||
}
|
||||
|
||||
System.out.println("Handles successfully updated in database.\n");
|
||||
System.out.println("Re-creating browse and search indexes...");
|
||||
|
||||
try
|
||||
{
|
||||
// Reinitialise the search and browse system
|
||||
IndexClient.main(new String[] {"-b"});
|
||||
System.out.println("Browse and search indexes are ready now.");
|
||||
// All done
|
||||
System.out.println("\nAll done successfully. Please check the DSpace logs!\n");
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// Not a lot we can do
|
||||
System.out.println("Error during re-indexing.");
|
||||
System.out.println(
|
||||
"\n\nAutomatic re-indexing failed. Please perform it manually.\n" +
|
||||
"You should run one of the following commands:\n\n" +
|
||||
" [dspace]/bin/dspace index-discovery -b\n\n" +
|
||||
"If you are using Solr for browse (this is the default setting).\n" +
|
||||
"When launching this command, your servlet container must be running.\n\n" +
|
||||
" [dspace]/bin/dspace index-lucene-init\n\n" +
|
||||
"If you enabled Lucene for search.\n" +
|
||||
"When launching this command, your servlet container must be shutdown.\n"
|
||||
);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println("No changes have been made to your data.\n");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
System.out.println("No changes have been made to your data.");
|
||||
System.out.println("Nothing to do! All handles are up-to-date.\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -10,7 +10,6 @@ package org.dspace.identifier;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Metadatum;
|
||||
@@ -68,12 +67,12 @@ public class DOIIdentifierProvider
|
||||
public static final String DOI_QUALIFIER = "uri";
|
||||
|
||||
public static final Integer TO_BE_REGISTERED = 1;
|
||||
public static final Integer TO_BE_RESERVERED = 2;
|
||||
public static final Integer TO_BE_RESERVED = 2;
|
||||
public static final Integer IS_REGISTERED = 3;
|
||||
public static final Integer IS_RESERVED = 4;
|
||||
public static final Integer UPDATE_RESERVERED = 5;
|
||||
public static final Integer UPDATE_RESERVED = 5;
|
||||
public static final Integer UPDATE_REGISTERED = 6;
|
||||
public static final Integer UPDATE_BEFORE_REGISTERATION = 7;
|
||||
public static final Integer UPDATE_BEFORE_REGISTRATION = 7;
|
||||
public static final Integer TO_BE_DELETED = 8;
|
||||
public static final Integer DELETED = 9;
|
||||
|
||||
@@ -251,7 +250,7 @@ public class DOIIdentifierProvider
|
||||
return;
|
||||
}
|
||||
|
||||
doiRow.setColumn("status", TO_BE_RESERVERED);
|
||||
doiRow.setColumn("status", TO_BE_RESERVED);
|
||||
try
|
||||
{
|
||||
DatabaseManager.update(context, doiRow);
|
||||
@@ -353,11 +352,11 @@ public class DOIIdentifierProvider
|
||||
}
|
||||
else if (TO_BE_REGISTERED == doiRow.getIntColumn("status"))
|
||||
{
|
||||
doiRow.setColumn("status", UPDATE_BEFORE_REGISTERATION);
|
||||
doiRow.setColumn("status", UPDATE_BEFORE_REGISTRATION);
|
||||
}
|
||||
else if (IS_RESERVED == doiRow.getIntColumn("status"))
|
||||
{
|
||||
doiRow.setColumn("status", UPDATE_RESERVERED);
|
||||
doiRow.setColumn("status", UPDATE_RESERVED);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -416,11 +415,11 @@ public class DOIIdentifierProvider
|
||||
{
|
||||
doiRow.setColumn("status", IS_REGISTERED);
|
||||
}
|
||||
else if (UPDATE_BEFORE_REGISTERATION == doiRow.getIntColumn("status"))
|
||||
else if (UPDATE_BEFORE_REGISTRATION == doiRow.getIntColumn("status"))
|
||||
{
|
||||
doiRow.setColumn("status", TO_BE_REGISTERED);
|
||||
}
|
||||
else if (UPDATE_RESERVERED == doiRow.getIntColumn("status"))
|
||||
else if (UPDATE_RESERVED == doiRow.getIntColumn("status"))
|
||||
{
|
||||
doiRow.setColumn("status", IS_RESERVED);
|
||||
}
|
||||
|
@@ -561,7 +561,7 @@ public class EZIDIdentifierProvider
|
||||
/**
|
||||
* Map selected DSpace metadata to fields recognized by DataCite.
|
||||
*/
|
||||
private Map<String, String> crosswalkMetadata(DSpaceObject dso)
|
||||
Map<String, String> crosswalkMetadata(DSpaceObject dso)
|
||||
{
|
||||
if ((null == dso) || !(dso instanceof Item))
|
||||
{
|
||||
@@ -632,18 +632,42 @@ public class EZIDIdentifierProvider
|
||||
mapped.put(DATACITE_PUBLICATION_YEAR, year);
|
||||
}
|
||||
|
||||
// TODO find a way to get a current direct URL to the object and set _target
|
||||
// mapped.put("_target", url);
|
||||
// Supply _target link back to this object
|
||||
String handle = dso.getHandle();
|
||||
if (null == handle)
|
||||
{
|
||||
log.warn("{} #{} has no handle -- location not set.",
|
||||
dso.getTypeText(), dso.getID());
|
||||
}
|
||||
else
|
||||
{
|
||||
String url = configurationService.getProperty("dspace.url")
|
||||
+ "/handle/" + item.getHandle();
|
||||
log.info("Supplying location: {}", url);
|
||||
mapped.put("_target", url);
|
||||
}
|
||||
|
||||
return mapped;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide a map from DSO metadata keys to EZID keys. This will drive the
|
||||
* generation of EZID metadata for the minting of new identifiers.
|
||||
*
|
||||
* @param aCrosswalk
|
||||
*/
|
||||
@Required
|
||||
public void setCrosswalk(Map<String, String> aCrosswalk)
|
||||
{
|
||||
crosswalk = aCrosswalk;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide a map from DSO metadata keys to classes which can transform their
|
||||
* values to something acceptable to EZID.
|
||||
*
|
||||
* @param transformMap
|
||||
*/
|
||||
public void setCrosswalkTransform(Map<String, Transform> transformMap)
|
||||
{
|
||||
transforms = transformMap;
|
||||
|
@@ -179,19 +179,19 @@ public class DOIOrganiser {
|
||||
|
||||
if (line.hasOption('l'))
|
||||
{
|
||||
organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVERED);
|
||||
organiser.list("reservation", null, null, DOIIdentifierProvider.TO_BE_RESERVED);
|
||||
organiser.list("registration", null, null, DOIIdentifierProvider.TO_BE_REGISTERED);
|
||||
organiser.list("update", null, null,
|
||||
DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
|
||||
DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
|
||||
DOIIdentifierProvider.UPDATE_REGISTERED,
|
||||
DOIIdentifierProvider.UPDATE_RESERVERED);
|
||||
DOIIdentifierProvider.UPDATE_RESERVED);
|
||||
organiser.list("deletion", null, null, DOIIdentifierProvider.TO_BE_DELETED);
|
||||
}
|
||||
|
||||
if (line.hasOption('s'))
|
||||
{
|
||||
TableRowIterator it = organiser
|
||||
.getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVERED);
|
||||
.getDOIsByStatus(DOIIdentifierProvider.TO_BE_RESERVED);
|
||||
|
||||
try {
|
||||
if (!it.hasNext())
|
||||
@@ -244,8 +244,8 @@ public class DOIOrganiser {
|
||||
if (line.hasOption('u'))
|
||||
{
|
||||
TableRowIterator it = organiser.getDOIsByStatus(
|
||||
DOIIdentifierProvider.UPDATE_BEFORE_REGISTERATION,
|
||||
DOIIdentifierProvider.UPDATE_RESERVERED,
|
||||
DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION,
|
||||
DOIIdentifierProvider.UPDATE_RESERVED,
|
||||
DOIIdentifierProvider.UPDATE_REGISTERED);
|
||||
|
||||
try {
|
||||
|
@@ -259,7 +259,7 @@ public class CCLookup {
|
||||
throws IOException{
|
||||
|
||||
// Determine the issue URL
|
||||
String issueUrl = this.cc_root + "/license/" + licenseId + "/issue";
|
||||
String issueUrl = cc_root + "/license/" + licenseId + "/issue";
|
||||
// Assemble the "answers" document
|
||||
String answer_doc = "<answers>\n<locale>" + lang + "</locale>\n" + "<license-" + licenseId + ">\n";
|
||||
Iterator keys = answers.keySet().iterator();
|
||||
@@ -411,31 +411,18 @@ public class CCLookup {
|
||||
|
||||
public String getRdf()
|
||||
throws IOException {
|
||||
String myString = null;
|
||||
java.io.ByteArrayOutputStream outputstream = new java.io.ByteArrayOutputStream();
|
||||
String result = "";
|
||||
try {
|
||||
outputstream.write("<result>\n".getBytes());
|
||||
JDOMXPath xpathRdf = new JDOMXPath("//result/rdf");
|
||||
JDOMXPath xpathLicenseRdf = new JDOMXPath("//result/licenserdf");
|
||||
XMLOutputter xmloutputter = new XMLOutputter();
|
||||
Element rdfParent = ((Element)xpathRdf.selectSingleNode(this.license_doc));
|
||||
xmloutputter.output(rdfParent, outputstream);
|
||||
Element licenseRdfParent = ((Element)xpathLicenseRdf.selectSingleNode(this.license_doc));
|
||||
outputstream.write("\n".getBytes());
|
||||
xmloutputter.output(licenseRdfParent, outputstream);
|
||||
outputstream.write("\n</result>\n".getBytes());
|
||||
result = CreativeCommons.fetchLicenseRDF(license_doc);
|
||||
} catch (Exception e) {
|
||||
log.warn("An error occurred getting the rdf . . ." + e.getMessage() );
|
||||
setSuccess(false);
|
||||
} finally {
|
||||
outputstream.close();
|
||||
return outputstream.toString();
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public boolean isSuccess() {
|
||||
setSuccess(false);
|
||||
java.io.ByteArrayOutputStream outputstream = new java.io.ByteArrayOutputStream();
|
||||
JDOMXPath xp_Success = null;
|
||||
String text = null;
|
||||
try {
|
||||
|
@@ -7,13 +7,16 @@
|
||||
*/
|
||||
package org.dspace.license;
|
||||
|
||||
import java.io.*;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.StringWriter;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import javax.xml.transform.Templates;
|
||||
import javax.xml.transform.Transformer;
|
||||
import javax.xml.transform.TransformerConfigurationException;
|
||||
import javax.xml.transform.TransformerException;
|
||||
import javax.xml.transform.TransformerFactory;
|
||||
@@ -26,11 +29,14 @@ import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Metadatum;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.Metadatum;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.Utils;
|
||||
import org.jdom.Document;
|
||||
import org.jdom.transform.JDOMResult;
|
||||
import org.jdom.transform.JDOMSource;
|
||||
|
||||
public class CreativeCommons
|
||||
{
|
||||
@@ -46,9 +52,17 @@ public class CreativeCommons
|
||||
|
||||
/**
|
||||
* Some BitStream Names (BSN)
|
||||
*
|
||||
* @deprecated use the metadata retrieved at {@link CreativeCommons#getCCField(String)} (see https://jira.duraspace.org/browse/DS-2604)
|
||||
*/
|
||||
@Deprecated
|
||||
private static final String BSN_LICENSE_URL = "license_url";
|
||||
|
||||
/**
|
||||
*
|
||||
* @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
|
||||
*/
|
||||
@Deprecated
|
||||
private static final String BSN_LICENSE_TEXT = "license_text";
|
||||
|
||||
private static final String BSN_LICENSE_RDF = "license_rdf";
|
||||
@@ -121,41 +135,6 @@ public class CreativeCommons
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_RDF, bs_rdf_format, licenseRdf.getBytes());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This is a bit of the "do-the-right-thing" method for CC stuff in an item
|
||||
*/
|
||||
public static void setLicense(Context context, Item item,
|
||||
String cc_license_url) throws SQLException, IOException,
|
||||
AuthorizeException
|
||||
{
|
||||
Bundle bundle = getCcBundle(item);
|
||||
|
||||
// get some more information
|
||||
String license_text = fetchLicenseText(cc_license_url);
|
||||
String license_rdf = fetchLicenseRDF(cc_license_url);
|
||||
|
||||
// set the formats
|
||||
BitstreamFormat bs_url_format = BitstreamFormat.findByShortDescription(
|
||||
context, "License");
|
||||
BitstreamFormat bs_text_format = BitstreamFormat.findByShortDescription(
|
||||
context, "CC License");
|
||||
BitstreamFormat bs_rdf_format = BitstreamFormat.findByShortDescription(
|
||||
context, "RDF XML");
|
||||
|
||||
// set the URL bitstream
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_URL, bs_url_format,
|
||||
cc_license_url.getBytes());
|
||||
|
||||
// set the license text bitstream
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_TEXT, bs_text_format,
|
||||
license_text.getBytes());
|
||||
|
||||
// set the RDF bitstream
|
||||
setBitstreamFromBytes(item, bundle, BSN_LICENSE_RDF, bs_rdf_format,
|
||||
license_rdf.getBytes());
|
||||
}
|
||||
|
||||
/**
|
||||
* Used by DSpaceMetsIngester
|
||||
*
|
||||
@@ -224,8 +203,7 @@ public class CreativeCommons
|
||||
// verify it has correct contents
|
||||
try
|
||||
{
|
||||
if ((getLicenseURL(item) == null) || (getLicenseText(item) == null)
|
||||
|| (getLicenseRDF(item) == null))
|
||||
if ((getLicenseURL(item) == null))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
@@ -238,18 +216,6 @@ public class CreativeCommons
|
||||
return true;
|
||||
}
|
||||
|
||||
public static String getLicenseURL(Item item) throws SQLException,
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
return getStringFromBitstream(item, BSN_LICENSE_URL);
|
||||
}
|
||||
|
||||
public static String getLicenseText(Item item) throws SQLException,
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
return getStringFromBitstream(item, BSN_LICENSE_TEXT);
|
||||
}
|
||||
|
||||
public static String getLicenseRDF(Item item) throws SQLException,
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
@@ -269,56 +235,55 @@ public class CreativeCommons
|
||||
/**
|
||||
* Get Creative Commons license Text, returning Bitstream object.
|
||||
* @return bitstream or null.
|
||||
*
|
||||
* @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
|
||||
*/
|
||||
@Deprecated
|
||||
public static Bitstream getLicenseTextBitstream(Item item) throws SQLException,
|
||||
IOException, AuthorizeException
|
||||
{
|
||||
return getBitstream(item, BSN_LICENSE_TEXT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the license text
|
||||
*
|
||||
* @param item - the item
|
||||
* @return the license in textual format
|
||||
* @throws SQLException
|
||||
* @throws IOException
|
||||
* @throws AuthorizeException
|
||||
*
|
||||
* @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it is no longer stored (see https://jira.duraspace.org/browse/DS-2604)
|
||||
*/
|
||||
public static String getLicenseText(Item item) throws SQLException, IOException, AuthorizeException {
|
||||
return getStringFromBitstream(item, BSN_LICENSE_TEXT);
|
||||
}
|
||||
|
||||
public static String getLicenseURL(Item item) throws SQLException, IOException, AuthorizeException {
|
||||
String licenseUri = CreativeCommons.getCCField("uri").ccItemValue(item);
|
||||
if (StringUtils.isNotBlank(licenseUri)) {
|
||||
return licenseUri;
|
||||
}
|
||||
// JSPUI backward compatibility see https://jira.duraspace.org/browse/DS-2604
|
||||
return getStringFromBitstream(item, BSN_LICENSE_URL);
|
||||
}
|
||||
|
||||
public static String fetchLicenseRdf(String ccResult) {
|
||||
StringWriter result = new StringWriter();
|
||||
String licenseRdfString = new String("");
|
||||
try {
|
||||
InputStream inputstream = new ByteArrayInputStream(ccResult.getBytes("UTF-8"));
|
||||
templates.newTransformer().transform(new StreamSource(inputstream), new StreamResult(result));
|
||||
} catch (TransformerException te) {
|
||||
throw new RuntimeException("Transformer exception " + te.getMessage(), te);
|
||||
} catch (IOException ioe) {
|
||||
throw new RuntimeException("IOexception " + ioe.getCause().toString(), ioe);
|
||||
} finally {
|
||||
return result.getBuffer().toString();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* The next two methods are old CC.
|
||||
* Remains until prev. usages are eliminated.
|
||||
* @Deprecated
|
||||
*
|
||||
*/
|
||||
/**
|
||||
* Get a few license-specific properties. We expect these to be cached at
|
||||
* least per server run.
|
||||
* Apply same transformation on the document to retrieve only the most relevant part of the document passed as parameter.
|
||||
* If no transformation is needed then take in consideration to empty the CreativeCommons.xml
|
||||
*
|
||||
* @param license - an element that could be contains as part of your content the license rdf
|
||||
* @return the document license in textual format after the transformation
|
||||
*/
|
||||
public static String fetchLicenseText(String license_url)
|
||||
{
|
||||
String text_url = license_url;
|
||||
byte[] urlBytes = fetchURL(text_url);
|
||||
|
||||
return (urlBytes != null) ? new String(urlBytes) : "";
|
||||
}
|
||||
|
||||
public static String fetchLicenseRDF(String license_url)
|
||||
public static String fetchLicenseRDF(Document license)
|
||||
{
|
||||
StringWriter result = new StringWriter();
|
||||
|
||||
try
|
||||
{
|
||||
templates.newTransformer().transform(
|
||||
new StreamSource(license_url + "rdf"),
|
||||
new JDOMSource(license),
|
||||
new StreamResult(result)
|
||||
);
|
||||
}
|
||||
@@ -421,33 +386,6 @@ public class CreativeCommons
|
||||
return baos.toByteArray();
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch the contents of a URL
|
||||
*/
|
||||
private static byte[] fetchURL(String url_string)
|
||||
{
|
||||
try
|
||||
{
|
||||
String line = "";
|
||||
URL url = new URL(url_string);
|
||||
URLConnection connection = url.openConnection();
|
||||
InputStream inputStream = connection.getInputStream();
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
while ((line = reader.readLine()) != null)
|
||||
{
|
||||
sb.append(line);
|
||||
}
|
||||
|
||||
return sb.toString().getBytes();
|
||||
}
|
||||
catch (Exception exc)
|
||||
{
|
||||
log.error(exc.getMessage());
|
||||
return null;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns a metadata field handle for given field Id
|
||||
*/
|
||||
@@ -564,4 +502,34 @@ public class CreativeCommons
|
||||
item.addMetadata(params[0], params[1], params[2], params[3], value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove license information, delete also the bitstream
|
||||
*
|
||||
* @param context - DSpace Context
|
||||
* @param uriField - the metadata field for license uri
|
||||
* @param nameField - the metadata field for license name
|
||||
* @param item - the item
|
||||
* @throws AuthorizeException
|
||||
* @throws IOException
|
||||
* @throws SQLException
|
||||
*/
|
||||
public static void removeLicense(Context context, MdField uriField,
|
||||
MdField nameField, Item item) throws AuthorizeException, IOException, SQLException {
|
||||
// only remove any previous licenses
|
||||
String licenseUri = uriField.ccItemValue(item);
|
||||
if (licenseUri != null) {
|
||||
uriField.removeItemValue(item, licenseUri);
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.setname"))
|
||||
{
|
||||
String licenseName = nameField.keyedItemValue(item, licenseUri);
|
||||
nameField.removeItemValue(item, licenseName);
|
||||
}
|
||||
if (ConfigurationManager.getBooleanProperty("cc.submit.addbitstream"))
|
||||
{
|
||||
removeLicense(context, item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -72,7 +72,7 @@ public class LicenseCleanup
|
||||
{
|
||||
|
||||
Context ctx = new Context();
|
||||
ctx.setIgnoreAuthorization(true);
|
||||
ctx.turnOffAuthorisationSystem();
|
||||
ItemIterator iter = Item.findAll(ctx);
|
||||
|
||||
Properties props = new Properties();
|
||||
|
@@ -20,6 +20,7 @@ import org.dspace.content.Bundle;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.Site;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.event.Consumer;
|
||||
@@ -52,6 +53,8 @@ public class RDFConsumer implements Consumer
|
||||
}
|
||||
|
||||
int sType = event.getSubjectType();
|
||||
log.debug(event.getEventTypeAsString() + " for "
|
||||
+ event.getSubjectTypeAsString() + ":" + event.getSubjectID());
|
||||
switch (sType)
|
||||
{
|
||||
case (Constants.BITSTREAM) :
|
||||
@@ -100,7 +103,7 @@ public class RDFConsumer implements Consumer
|
||||
Bitstream bitstream = Bitstream.find(ctx, event.getSubjectID());
|
||||
if (bitstream == null)
|
||||
{
|
||||
log.warn("Cannot find bitstream " + event.getSubjectID() + "! "
|
||||
log.debug("Cannot find bitstream " + event.getSubjectID() + "! "
|
||||
+ "Ignoring, as it is likely it was deleted "
|
||||
+ "and we'll cover it by a REMOVE event on its bundle.");
|
||||
return;
|
||||
@@ -111,6 +114,11 @@ public class RDFConsumer implements Consumer
|
||||
Item[] items = b.getItems();
|
||||
for (Item i : items)
|
||||
{
|
||||
if (WorkspaceItem.findByItem(ctx, i) != null)
|
||||
{
|
||||
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
|
||||
continue;
|
||||
}
|
||||
DSOIdentifier id = new DSOIdentifier(i, ctx);
|
||||
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
|
||||
{
|
||||
@@ -148,7 +156,7 @@ public class RDFConsumer implements Consumer
|
||||
Bundle bundle = Bundle.find(ctx, event.getSubjectID());
|
||||
if (bundle == null)
|
||||
{
|
||||
log.warn("Cannot find bundle " + event.getSubjectID() + "! "
|
||||
log.debug("Cannot find bundle " + event.getSubjectID() + "! "
|
||||
+ "Ignoring, as it is likely it was deleted "
|
||||
+ "and we'll cover it by a REMOVE event on its item.");
|
||||
return;
|
||||
@@ -156,6 +164,11 @@ public class RDFConsumer implements Consumer
|
||||
Item[] items = bundle.getItems();
|
||||
for (Item i : items)
|
||||
{
|
||||
if (WorkspaceItem.findByItem(ctx, i) != null)
|
||||
{
|
||||
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
|
||||
continue;
|
||||
}
|
||||
DSOIdentifier id = new DSOIdentifier(i, ctx);
|
||||
if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
|
||||
{
|
||||
@@ -216,14 +229,24 @@ public class RDFConsumer implements Consumer
|
||||
DSpaceObject dso = event.getSubject(ctx);
|
||||
if (dso == null)
|
||||
{
|
||||
log.warn("Cannot find " + event.getSubjectTypeAsString() + " "
|
||||
log.debug("Cannot find " + event.getSubjectTypeAsString() + " "
|
||||
+ event.getSubjectID() + "! " + "Ignoring, as it is "
|
||||
+ "likely it was deleted and we'll cover it by another "
|
||||
+ "event with the type REMOVE.");
|
||||
return;
|
||||
}
|
||||
DSOIdentifier id = new DSOIdentifier(dso, ctx);
|
||||
|
||||
// ignore unfinished submissions here. Every unfinished submission
|
||||
// has an workspace item. The item flag "in_archive" doesn't help us
|
||||
// here as this is also set to false if a newer version was submitted.
|
||||
if (dso instanceof Item
|
||||
&& WorkspaceItem.findByItem(ctx, (Item) dso) != null)
|
||||
{
|
||||
log.debug("Ignoring Item " + dso.getID() + " as a corresponding workspace item exists.");
|
||||
return;
|
||||
}
|
||||
|
||||
DSOIdentifier id = new DSOIdentifier(dso, ctx);
|
||||
// If an item gets withdrawn, a MODIFIY event is fired. We have to
|
||||
// delete the item from the triple store instead of converting it.
|
||||
// we don't have to take care for reinstantions of items as they can
|
||||
|
@@ -392,7 +392,7 @@ public class DSIndexer
|
||||
{
|
||||
setBatchProcessingMode(true);
|
||||
Context context = new Context();
|
||||
context.setIgnoreAuthorization(true);
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
String usage = "org.dspace.search.DSIndexer [-cbhof[r <item handle>]] or nothing to update/clean an existing index.";
|
||||
Options options = new Options();
|
||||
|
@@ -7,14 +7,24 @@
|
||||
*/
|
||||
package org.dspace.statistics;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.InetAddress;
|
||||
import java.net.URL;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.io.Resources;
|
||||
import com.maxmind.geoip.Location;
|
||||
import com.maxmind.geoip.LookupService;
|
||||
import com.maxmind.geoip2.DatabaseReader;
|
||||
import com.maxmind.geoip2.model.CityResponse;
|
||||
|
||||
import org.apache.commons.lang.exception.ExceptionUtils;
|
||||
import org.apache.commons.lang.time.DateFormatUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import org.dspace.content.*;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
@@ -30,7 +40,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuild
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.client.Client;
|
||||
|
||||
import org.elasticsearch.client.transport.TransportClient;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
@@ -41,13 +50,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.node.NodeBuilder;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
|
||||
public class ElasticSearchLogger {
|
||||
|
||||
private static Logger log = Logger.getLogger(ElasticSearchLogger.class);
|
||||
@@ -58,8 +60,6 @@ public class ElasticSearchLogger {
|
||||
|
||||
public static final String DATE_FORMAT_DCDATE = "yyyy-MM-dd'T'HH:mm:ss'Z'";
|
||||
|
||||
private static LookupService locationService;
|
||||
|
||||
public static String clusterName = "dspacestatslogging";
|
||||
public static String indexName = "dspaceindex";
|
||||
public static String indexType = "stats";
|
||||
@@ -68,6 +68,8 @@ public class ElasticSearchLogger {
|
||||
|
||||
private static Client client;
|
||||
|
||||
protected static DatabaseReader locationService;
|
||||
|
||||
public static enum ClientType {
|
||||
NODE, LOCAL, TRANSPORT
|
||||
}
|
||||
@@ -93,16 +95,24 @@ public class ElasticSearchLogger {
|
||||
public void initializeElasticSearch() {
|
||||
log.info("DSpace ElasticSearchLogger Initializing");
|
||||
try {
|
||||
LookupService service = null;
|
||||
DatabaseReader service = null;
|
||||
// Get the db file for the location
|
||||
String dbfile = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
if (dbfile != null) {
|
||||
String dbPath = ConfigurationManager.getProperty("usage-statistics.dbfile");
|
||||
if (dbPath != null) {
|
||||
try {
|
||||
service = new LookupService(dbfile, LookupService.GEOIP_STANDARD);
|
||||
File dbFile = new File(dbPath);
|
||||
service = new DatabaseReader.Builder(dbFile).build();
|
||||
} catch (FileNotFoundException fe) {
|
||||
log.error("The GeoLite Database file is missing (" + dbfile + ")! Usage Statistics cannot generate location based reports! Please see the DSpace installation instructions for instructions to install this file.", fe);
|
||||
log.error(
|
||||
"The GeoLite Database file is missing (" + dbPath + ")! Usage Statistics cannot generate location"
|
||||
+ " based reports! Please see the DSpace installation instructions for instructions to"
|
||||
+ " install this file.",
|
||||
fe);
|
||||
} catch (IOException e) {
|
||||
log.error("Unable to load GeoLite Database file (" + dbfile + ")! You may need to reinstall it. See the DSpace installation instructions for more details.", e);
|
||||
log.error(
|
||||
"Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the"
|
||||
+ " DSpace installation instructions for more details.",
|
||||
e);
|
||||
}
|
||||
} else {
|
||||
log.error("The required 'dbfile' configuration is missing in usage-statistics.cfg!");
|
||||
@@ -257,21 +267,24 @@ public class ElasticSearchLogger {
|
||||
|
||||
// Save the location information if valid, save the event without
|
||||
// location information if not valid
|
||||
Location location = locationService.getLocation(ip);
|
||||
if (location != null
|
||||
&& !("--".equals(location.countryCode)
|
||||
&& location.latitude == -180 && location.longitude == -180)) {
|
||||
InetAddress ipAddress = InetAddress.getByName(ip);
|
||||
CityResponse location = locationService.city(ipAddress);
|
||||
String countryCode = location.getCountry().getIsoCode();
|
||||
double latitude = location.getLocation().getLatitude();
|
||||
double longitude = location.getLocation().getLongitude();
|
||||
if (!("--".equals(countryCode)
|
||||
&& latitude == -180 && longitude == -180)) {
|
||||
try {
|
||||
docBuilder.field("continent", LocationUtils
|
||||
.getContinentCode(location.countryCode));
|
||||
} catch (Exception e) {
|
||||
.getContinentCode(countryCode));
|
||||
} catch (IOException e) {
|
||||
System.out
|
||||
.println("COUNTRY ERROR: " + location.countryCode);
|
||||
.println("COUNTRY ERROR: " + countryCode);
|
||||
}
|
||||
docBuilder.field("countryCode", location.countryCode);
|
||||
docBuilder.field("city", location.city);
|
||||
docBuilder.field("latitude", location.latitude);
|
||||
docBuilder.field("longitude", location.longitude);
|
||||
docBuilder.field("countryCode", countryCode);
|
||||
docBuilder.field("city", location.getCity().getName());
|
||||
docBuilder.field("latitude", latitude);
|
||||
docBuilder.field("longitude", longitude);
|
||||
docBuilder.field("isBot", isSpiderBot);
|
||||
|
||||
if (request.getHeader("User-Agent") != null) {
|
||||
@@ -373,22 +386,25 @@ public class ElasticSearchLogger {
|
||||
}
|
||||
|
||||
// Save the location information if valid, save the event without
|
||||
// location information if not valid
|
||||
Location location = locationService.getLocation(ip);
|
||||
if (location != null
|
||||
&& !("--".equals(location.countryCode)
|
||||
&& location.latitude == -180 && location.longitude == -180)) {
|
||||
// location information if not valid.
|
||||
InetAddress ipAddress = InetAddress.getByName(ip);
|
||||
CityResponse location = locationService.city(ipAddress);
|
||||
String countryCode = location.getCountry().getIsoCode();
|
||||
double latitude = location.getLocation().getLatitude();
|
||||
double longitude = location.getLocation().getLongitude();
|
||||
if (!("--".equals(countryCode)
|
||||
&& latitude == -180 && longitude == -180)) {
|
||||
try {
|
||||
docBuilder.field("continent", LocationUtils
|
||||
.getContinentCode(location.countryCode));
|
||||
} catch (Exception e) {
|
||||
.getContinentCode(countryCode));
|
||||
} catch (IOException e) {
|
||||
System.out
|
||||
.println("COUNTRY ERROR: " + location.countryCode);
|
||||
.println("COUNTRY ERROR: " + countryCode);
|
||||
}
|
||||
docBuilder.field("countryCode", location.countryCode);
|
||||
docBuilder.field("city", location.city);
|
||||
docBuilder.field("latitude", location.latitude);
|
||||
docBuilder.field("longitude", location.longitude);
|
||||
docBuilder.field("countryCode", countryCode);
|
||||
docBuilder.field("city", location.getCity().getName());
|
||||
docBuilder.field("latitude", latitude);
|
||||
docBuilder.field("longitude", longitude);
|
||||
docBuilder.field("isBot", isSpiderBot);
|
||||
|
||||
if (userAgent != null) {
|
||||
|
@@ -7,10 +7,37 @@
|
||||
*/
|
||||
package org.dspace.statistics;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.InetAddress;
|
||||
import java.net.URLEncoder;
|
||||
import java.sql.SQLException;
|
||||
import java.text.DateFormat;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import au.com.bytecode.opencsv.CSVReader;
|
||||
import au.com.bytecode.opencsv.CSVWriter;
|
||||
import com.maxmind.geoip.Location;
|
||||
import com.maxmind.geoip.LookupService;
|
||||
import com.maxmind.geoip2.DatabaseReader;
|
||||
import com.maxmind.geoip2.exception.GeoIp2Exception;
|
||||
import com.maxmind.geoip2.model.CityResponse;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang.ArrayUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
@@ -18,23 +45,24 @@ import org.apache.commons.lang.time.DateFormatUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.DefaultHttpClient;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
import org.apache.solr.client.solrj.SolrServer;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.apache.solr.client.solrj.impl.HttpSolrServer;
|
||||
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
|
||||
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
|
||||
import org.apache.solr.client.solrj.request.CoreAdminRequest;
|
||||
import org.apache.solr.client.solrj.request.LukeRequest;
|
||||
import org.apache.solr.client.solrj.response.FacetField;
|
||||
import org.apache.solr.client.solrj.response.LukeResponse;
|
||||
import org.apache.solr.client.solrj.response.QueryResponse;
|
||||
import org.apache.solr.client.solrj.response.RangeFacet;
|
||||
import org.apache.solr.client.solrj.response.SolrPingResponse;
|
||||
import org.apache.solr.client.solrj.util.ClientUtils;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrDocumentList;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
import org.apache.solr.common.luke.FieldFlag;
|
||||
import org.apache.solr.common.params.*;
|
||||
import org.apache.solr.common.util.JavaBinCodec;
|
||||
import org.dspace.content.*;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
@@ -46,15 +74,10 @@ import org.dspace.statistics.util.DnsLookup;
|
||||
import org.dspace.statistics.util.LocationUtils;
|
||||
import org.dspace.statistics.util.SpiderDetector;
|
||||
import org.dspace.usage.UsageWorkflowEvent;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.io.*;
|
||||
import java.net.URLEncoder;
|
||||
import java.sql.SQLException;
|
||||
import java.text.DateFormat;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.*;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Static holder for a HttpSolrClient connection pool to issue
|
||||
@@ -63,19 +86,21 @@ import java.util.*;
|
||||
*
|
||||
* @author ben at atmire.com
|
||||
* @author kevinvandevelde at atmire.com
|
||||
* @author mdiggory at atmire.com
|
||||
* @author mdiggory at atmire.com
|
||||
*/
|
||||
public class SolrLogger
|
||||
{
|
||||
private static final Logger log = Logger.getLogger(SolrLogger.class);
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(SolrLogger.class);
|
||||
|
||||
private static final String MULTIPLE_VALUES_SPLITTER = "|";
|
||||
|
||||
private static final HttpSolrServer solr;
|
||||
|
||||
public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
|
||||
|
||||
public static final String DATE_FORMAT_DCDATE = "yyyy-MM-dd'T'HH:mm:ss'Z'";
|
||||
|
||||
private static final LookupService locationService;
|
||||
private static final DatabaseReader locationService;
|
||||
|
||||
private static final boolean useProxies;
|
||||
|
||||
@@ -143,23 +168,24 @@ public class SolrLogger
|
||||
// Read in the file so we don't have to do it all the time
|
||||
//spiderIps = SpiderDetector.getSpiderIpAddresses();
|
||||
|
||||
LookupService service = null;
|
||||
DatabaseReader service = null;
|
||||
// Get the db file for the location
|
||||
String dbfile = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
if (dbfile != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
service = new LookupService(dbfile,
|
||||
LookupService.GEOIP_STANDARD);
|
||||
}
|
||||
catch (FileNotFoundException fe)
|
||||
{
|
||||
log.error("The GeoLite Database file is missing (" + dbfile + ")! Solr Statistics cannot generate location based reports! Please see the DSpace installation instructions for instructions to install this file.", fe);
|
||||
}
|
||||
catch (IOException e)
|
||||
{
|
||||
log.error("Unable to load GeoLite Database file (" + dbfile + ")! You may need to reinstall it. See the DSpace installation instructions for more details.", e);
|
||||
String dbPath = ConfigurationManager.getProperty("usage-statistics.dbfile");
|
||||
if (dbPath != null) {
|
||||
try {
|
||||
File dbFile = new File(dbPath);
|
||||
service = new DatabaseReader.Builder(dbFile).build();
|
||||
} catch (FileNotFoundException fe) {
|
||||
log.error(
|
||||
"The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " +
|
||||
"based reports! Please see the DSpace installation instructions for instructions to install " +
|
||||
"this file.",
|
||||
fe);
|
||||
} catch (IOException e) {
|
||||
log.error(
|
||||
"Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " +
|
||||
"DSpace installation instructions for more details.",
|
||||
e);
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -331,32 +357,35 @@ public class SolrLogger
|
||||
{
|
||||
doc1.addField("userAgent", request.getHeader("User-Agent"));
|
||||
}
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
// Save the location information if valid, save the event without
|
||||
// location information if not valid
|
||||
if(locationService != null)
|
||||
{
|
||||
Location location = locationService.getLocation(ip);
|
||||
if (location != null
|
||||
&& !("--".equals(location.countryCode)
|
||||
&& location.latitude == -180 && location.longitude == -180))
|
||||
{
|
||||
try
|
||||
{
|
||||
doc1.addField("continent", LocationUtils
|
||||
.getContinentCode(location.countryCode));
|
||||
if (locationService != null) {
|
||||
try {
|
||||
InetAddress ipAddress = InetAddress.getByName(ip);
|
||||
CityResponse location = locationService.city(ipAddress);
|
||||
String countryCode = location.getCountry().getIsoCode();
|
||||
double latitude = location.getLocation().getLatitude();
|
||||
double longitude = location.getLocation().getLongitude();
|
||||
if (!(
|
||||
"--".equals(countryCode)
|
||||
&& latitude == -180
|
||||
&& longitude == -180)
|
||||
) {
|
||||
try {
|
||||
doc1.addField("continent", LocationUtils
|
||||
.getContinentCode(countryCode));
|
||||
} catch (Exception e) {
|
||||
System.out
|
||||
.println("COUNTRY ERROR: " + countryCode);
|
||||
}
|
||||
doc1.addField("countryCode", countryCode);
|
||||
doc1.addField("city", location.getCity().getName());
|
||||
doc1.addField("latitude", latitude);
|
||||
doc1.addField("longitude", longitude);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
System.out
|
||||
.println("COUNTRY ERROR: " + location.countryCode);
|
||||
}
|
||||
doc1.addField("countryCode", location.countryCode);
|
||||
doc1.addField("city", location.city);
|
||||
doc1.addField("latitude", location.latitude);
|
||||
doc1.addField("longitude", location.longitude);
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
|
||||
|
||||
} catch (IOException | GeoIp2Exception e) {
|
||||
log.error("Unable to get location of request: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -416,32 +445,35 @@ public class SolrLogger
|
||||
{
|
||||
doc1.addField("userAgent", userAgent);
|
||||
}
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
// Save the location information if valid, save the event without
|
||||
// location information if not valid
|
||||
if(locationService != null)
|
||||
{
|
||||
Location location = locationService.getLocation(ip);
|
||||
if (location != null
|
||||
&& !("--".equals(location.countryCode)
|
||||
&& location.latitude == -180 && location.longitude == -180))
|
||||
{
|
||||
try
|
||||
{
|
||||
doc1.addField("continent", LocationUtils
|
||||
.getContinentCode(location.countryCode));
|
||||
if (locationService != null) {
|
||||
try {
|
||||
InetAddress ipAddress = InetAddress.getByName(ip);
|
||||
CityResponse location = locationService.city(ipAddress);
|
||||
String countryCode = location.getCountry().getIsoCode();
|
||||
double latitude = location.getLocation().getLatitude();
|
||||
double longitude = location.getLocation().getLongitude();
|
||||
if (!(
|
||||
"--".equals(countryCode)
|
||||
&& latitude == -180
|
||||
&& longitude == -180)
|
||||
) {
|
||||
try {
|
||||
doc1.addField("continent", LocationUtils
|
||||
.getContinentCode(countryCode));
|
||||
} catch (Exception e) {
|
||||
System.out
|
||||
.println("COUNTRY ERROR: " + countryCode);
|
||||
}
|
||||
doc1.addField("countryCode", countryCode);
|
||||
doc1.addField("city", location.getCity().getName());
|
||||
doc1.addField("latitude", latitude);
|
||||
doc1.addField("longitude", longitude);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
System.out
|
||||
.println("COUNTRY ERROR: " + location.countryCode);
|
||||
}
|
||||
doc1.addField("countryCode", location.countryCode);
|
||||
doc1.addField("city", location.city);
|
||||
doc1.addField("latitude", location.latitude);
|
||||
doc1.addField("longitude", location.longitude);
|
||||
doc1.addField("isBot",isSpiderBot);
|
||||
|
||||
|
||||
} catch (GeoIp2Exception | IOException e) {
|
||||
log.error("Unable to get location of request: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1310,8 +1342,12 @@ public class SolrLogger
|
||||
yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
|
||||
yearQueryParams.put(CommonParams.WT, "csv");
|
||||
|
||||
//Tell SOLR how to escape and separate the values of multi-valued fields
|
||||
yearQueryParams.put("csv.escape", "\\");
|
||||
yearQueryParams.put("csv.mv.separator", MULTIPLE_VALUES_SPLITTER);
|
||||
|
||||
//Start by creating a new core
|
||||
String coreName = "statistics-" + dcStart.getYear();
|
||||
String coreName = "statistics-" + dcStart.getYearUTC();
|
||||
HttpSolrServer statisticsYearServer = createCore(solr, coreName);
|
||||
|
||||
System.out.println("Moving: " + totalRecords + " into core " + coreName);
|
||||
@@ -1326,7 +1362,7 @@ public class SolrLogger
|
||||
HttpResponse response = new DefaultHttpClient().execute(get);
|
||||
InputStream csvInputstream = response.getEntity().getContent();
|
||||
//Write the csv ouput to a file !
|
||||
File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear() + "." + i + ".csv");
|
||||
File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYearUTC() + "." + i + ".csv");
|
||||
FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
|
||||
filesToUpload.add(csvFile);
|
||||
|
||||
@@ -1334,13 +1370,22 @@ public class SolrLogger
|
||||
yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
|
||||
}
|
||||
|
||||
Set<String> multivaluedFields = getMultivaluedFieldNames();
|
||||
|
||||
for (File tempCsv : filesToUpload) {
|
||||
//Upload the data in the csv files to our new solr core
|
||||
ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
|
||||
contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
|
||||
contentStreamUpdateRequest.setParam("escape", "\\");
|
||||
contentStreamUpdateRequest.setParam("skip", "_version_");
|
||||
contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
|
||||
contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");
|
||||
|
||||
//Add parsing directives for the multivalued fields so that they are stored as separate values instead of one value
|
||||
for (String multivaluedField : multivaluedFields) {
|
||||
contentStreamUpdateRequest.setParam("f." + multivaluedField + ".split", Boolean.TRUE.toString());
|
||||
contentStreamUpdateRequest.setParam("f." + multivaluedField + ".separator", MULTIPLE_VALUES_SPLITTER);
|
||||
}
|
||||
statisticsYearServer.request(contentStreamUpdateRequest);
|
||||
}
|
||||
statisticsYearServer.commit(true, true);
|
||||
@@ -1359,6 +1404,14 @@ public class SolrLogger
|
||||
private static HttpSolrServer createCore(HttpSolrServer solr, String coreName) throws IOException, SolrServerException {
|
||||
String solrDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" +File.separator;
|
||||
String baseSolrUrl = solr.getBaseURL().replace("statistics", "");
|
||||
HttpSolrServer returnServer = new HttpSolrServer(baseSolrUrl + "/" + coreName);
|
||||
try {
|
||||
SolrPingResponse ping = returnServer.ping();
|
||||
log.debug(String.format("Ping of Solr Core [%s] Returned with Status [%d]", coreName, ping.getStatus()));
|
||||
return returnServer;
|
||||
} catch(Exception e) {
|
||||
log.debug(String.format("Ping of Solr Core [%s] Failed with [%s]. New Core Will be Created", coreName, e.getClass().getName()));
|
||||
}
|
||||
CoreAdminRequest.Create create = new CoreAdminRequest.Create();
|
||||
create.setCoreName(coreName);
|
||||
create.setInstanceDir("statistics");
|
||||
@@ -1366,10 +1419,35 @@ public class SolrLogger
|
||||
HttpSolrServer solrServer = new HttpSolrServer(baseSolrUrl);
|
||||
create.process(solrServer);
|
||||
log.info("Created core with name: " + coreName);
|
||||
return new HttpSolrServer(baseSolrUrl + "/" + coreName);
|
||||
return returnServer;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieves a list of all the multi valued fields in the solr core
|
||||
* @return all fields tagged as multivalued
|
||||
* @throws SolrServerException When getting the schema information from the SOLR core fails
|
||||
* @throws IOException When connection to the SOLR server fails
|
||||
*/
|
||||
public static Set<String> getMultivaluedFieldNames() throws SolrServerException, IOException {
|
||||
Set<String> multivaluedFields = new HashSet<String>();
|
||||
LukeRequest lukeRequest = new LukeRequest();
|
||||
lukeRequest.setShowSchema(true);
|
||||
LukeResponse process = lukeRequest.process(solr);
|
||||
Map<String, LukeResponse.FieldInfo> fields = process.getFieldInfo();
|
||||
for(String fieldName : fields.keySet())
|
||||
{
|
||||
LukeResponse.FieldInfo fieldInfo = fields.get(fieldName);
|
||||
EnumSet<FieldFlag> flags = fieldInfo.getFlags();
|
||||
for(FieldFlag fieldFlag : flags)
|
||||
{
|
||||
if(fieldFlag.getAbbreviation() == FieldFlag.MULTI_VALUED.getAbbreviation())
|
||||
{
|
||||
multivaluedFields.add(fieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return multivaluedFields;
|
||||
}
|
||||
public static void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
|
||||
Context context = new Context();
|
||||
|
||||
|
@@ -15,6 +15,7 @@ import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.Collections;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
@@ -42,10 +43,10 @@ public class SpiderDetector {
|
||||
private static IPTable table = null;
|
||||
|
||||
/** Collection of regular expressions to match known spiders' agents. */
|
||||
private static List<Pattern> agents = new ArrayList<Pattern>();
|
||||
private static List<Pattern> agents = Collections.synchronizedList(new ArrayList<Pattern>());
|
||||
|
||||
/** Collection of regular expressions to match known spiders' domain names. */
|
||||
private static List<Pattern> domains = new ArrayList<Pattern>();
|
||||
private static List<Pattern> domains = Collections.synchronizedList(new ArrayList<Pattern>());
|
||||
|
||||
/**
|
||||
* Utility method which reads lines from a file & returns them in a Set.
|
||||
@@ -199,13 +200,15 @@ public class SpiderDetector {
|
||||
{
|
||||
// See if any agent patterns match
|
||||
if (null != agent)
|
||||
{
|
||||
if (agents.isEmpty())
|
||||
loadPatterns("agents", agents);
|
||||
|
||||
{
|
||||
synchronized(agents)
|
||||
{
|
||||
if (agents.isEmpty())
|
||||
loadPatterns("agents", agents);
|
||||
}
|
||||
for (Pattern candidate : agents)
|
||||
{
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
if (null != candidate && candidate.matcher(agent).find())
|
||||
{
|
||||
return true;
|
||||
@@ -230,15 +233,15 @@ public class SpiderDetector {
|
||||
// No. See if any DNS names match
|
||||
if (null != hostname)
|
||||
{
|
||||
if (domains.isEmpty())
|
||||
synchronized(domains)
|
||||
{
|
||||
loadPatterns("domains", domains);
|
||||
if (domains.isEmpty())
|
||||
loadPatterns("domains", domains);
|
||||
}
|
||||
|
||||
for (Pattern candidate : domains)
|
||||
{
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
if (null != candidate && candidate.matcher(hostname).find())
|
||||
// prevent matcher() invocation from a null Pattern object
|
||||
if (null != candidate && candidate.matcher(hostname).find())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
@@ -22,8 +22,10 @@ import org.dspace.statistics.SolrLogger;
|
||||
import java.util.Date;
|
||||
import java.text.SimpleDateFormat;
|
||||
|
||||
import com.maxmind.geoip.LookupService;
|
||||
import com.maxmind.geoip.Location;
|
||||
import com.maxmind.geoip2.DatabaseReader;
|
||||
import com.maxmind.geoip2.model.CityResponse;
|
||||
import java.io.File;
|
||||
import java.net.InetAddress;
|
||||
|
||||
/**
|
||||
* Test class to generate random statistics data.
|
||||
@@ -194,17 +196,17 @@ public class StatisticsDataGenerator {
|
||||
solr.commit();
|
||||
|
||||
String prevIp = null;
|
||||
String dbfile = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
LookupService cl = new LookupService(dbfile,
|
||||
LookupService.GEOIP_STANDARD);
|
||||
String dbPath = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
File dbFile = new File(dbPath);
|
||||
DatabaseReader cl = new DatabaseReader.Builder(dbFile).build();
|
||||
int countryErrors = 0;
|
||||
for (int i = 0; i < nrLogs; i++) {
|
||||
String ip = "";
|
||||
Date time;
|
||||
String continent;
|
||||
String countryCode;
|
||||
float longitude;
|
||||
float latitude;
|
||||
double longitude;
|
||||
double latitude;
|
||||
String city;
|
||||
|
||||
// 1. Generate an ip for our user
|
||||
@@ -219,9 +221,11 @@ public class StatisticsDataGenerator {
|
||||
ip = ipBuilder.toString();
|
||||
|
||||
// 2 Depending on our ip get all the location info
|
||||
Location location;
|
||||
InetAddress ipAddress;
|
||||
CityResponse location;
|
||||
try {
|
||||
location = cl.getLocation(ip);
|
||||
ipAddress = InetAddress.getByName(ip);
|
||||
location = cl.city(ipAddress);
|
||||
} catch (Exception e) {
|
||||
location = null;
|
||||
}
|
||||
@@ -233,13 +237,14 @@ public class StatisticsDataGenerator {
|
||||
continue;
|
||||
}
|
||||
ip = prevIp;
|
||||
location = cl.getLocation(ip);
|
||||
ipAddress = InetAddress.getByName(ip);
|
||||
location = cl.city(ipAddress);
|
||||
}
|
||||
|
||||
city = location.city;
|
||||
countryCode = location.countryCode;
|
||||
longitude = location.longitude;
|
||||
latitude = location.latitude;
|
||||
city = location.getCity().getName();
|
||||
countryCode = location.getCountry().getIsoCode();
|
||||
longitude = location.getLocation().getLongitude();
|
||||
latitude = location.getLocation().getLatitude();
|
||||
try {
|
||||
continent = LocationUtils.getContinentCode(countryCode);
|
||||
} catch (Exception e) {
|
||||
|
@@ -7,7 +7,29 @@
|
||||
*/
|
||||
package org.dspace.statistics.util;
|
||||
|
||||
import org.apache.commons.cli.*;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.FilenameFilter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.InetAddress;
|
||||
import java.text.DecimalFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import com.maxmind.geoip2.DatabaseReader;
|
||||
import com.maxmind.geoip2.model.CityResponse;
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.lang.time.DateFormatUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
@@ -21,13 +43,6 @@ import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.statistics.SolrLogger;
|
||||
|
||||
import java.text.*;
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
|
||||
import com.maxmind.geoip.LookupService;
|
||||
import com.maxmind.geoip.Location;
|
||||
|
||||
/**
|
||||
* Class to load intermediate statistics files (produced from log files by {@link ClassicDSpaceLogConverter}) into Solr.
|
||||
*
|
||||
@@ -45,8 +60,10 @@ public class StatisticsImporter
|
||||
/** Solr server connection */
|
||||
private static HttpSolrServer solr;
|
||||
|
||||
/** GEOIP lookup service */
|
||||
private static LookupService geoipLookup;
|
||||
/**
|
||||
* GEOIP lookup service
|
||||
*/
|
||||
private static DatabaseReader geoipLookup;
|
||||
|
||||
/** Whether to skip the DNS reverse lookup or not */
|
||||
private static boolean skipReverseDNS = false;
|
||||
@@ -170,8 +187,8 @@ public class StatisticsImporter
|
||||
String continent = "";
|
||||
String country = "";
|
||||
String countryCode = "";
|
||||
float longitude = 0f;
|
||||
float latitude = 0f;
|
||||
double longitude = 0f;
|
||||
double latitude = 0f;
|
||||
String city = "";
|
||||
String dns;
|
||||
|
||||
@@ -236,15 +253,15 @@ public class StatisticsImporter
|
||||
}
|
||||
|
||||
// Get the geo information for the user
|
||||
Location location;
|
||||
try {
|
||||
location = geoipLookup.getLocation(ip);
|
||||
city = location.city;
|
||||
country = location.countryName;
|
||||
countryCode = location.countryCode;
|
||||
longitude = location.longitude;
|
||||
latitude = location.latitude;
|
||||
if(verbose) {
|
||||
InetAddress ipAddress = InetAddress.getByName(ip);
|
||||
CityResponse cityResponse = geoipLookup.city(ipAddress);
|
||||
city = cityResponse.getCity().getName();
|
||||
country = cityResponse.getCountry().getName();
|
||||
countryCode = cityResponse.getCountry().getIsoCode();
|
||||
longitude = cityResponse.getLocation().getLongitude();
|
||||
latitude = cityResponse.getLocation().getLatitude();
|
||||
if (verbose) {
|
||||
data += (", country = " + country);
|
||||
data += (", city = " + city);
|
||||
System.out.println(data);
|
||||
@@ -450,18 +467,21 @@ public class StatisticsImporter
|
||||
}
|
||||
solr = new HttpSolrServer(sserver);
|
||||
|
||||
String dbfile = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
try
|
||||
{
|
||||
geoipLookup = new LookupService(dbfile, LookupService.GEOIP_STANDARD);
|
||||
}
|
||||
catch (FileNotFoundException fe)
|
||||
{
|
||||
log.error("The GeoLite Database file is missing (" + dbfile + ")! Solr Statistics cannot generate location based reports! Please see the DSpace installation instructions for instructions to install this file.", fe);
|
||||
}
|
||||
catch (IOException e)
|
||||
{
|
||||
log.error("Unable to load GeoLite Database file (" + dbfile + ")! You may need to reinstall it. See the DSpace installation instructions for more details.", e);
|
||||
String dbPath = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
try {
|
||||
File dbFile = new File(dbPath);
|
||||
geoipLookup = new DatabaseReader.Builder(dbFile).build();
|
||||
} catch (FileNotFoundException fe) {
|
||||
log.error(
|
||||
"The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " +
|
||||
"based reports! Please see the DSpace installation instructions for instructions to install this " +
|
||||
"file.",
|
||||
fe);
|
||||
} catch (IOException e) {
|
||||
log.error(
|
||||
"Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the DSpace " +
|
||||
"installation instructions for more details.",
|
||||
e);
|
||||
}
|
||||
|
||||
|
||||
|
@@ -7,8 +7,13 @@
|
||||
*/
|
||||
package org.dspace.statistics.util;
|
||||
|
||||
import com.maxmind.geoip.Location;
|
||||
import com.maxmind.geoip.LookupService;
|
||||
import java.io.*;
|
||||
import java.net.InetAddress;
|
||||
|
||||
import com.maxmind.geoip2.DatabaseReader;
|
||||
import com.maxmind.geoip2.exception.GeoIp2Exception;
|
||||
import com.maxmind.geoip2.model.CityResponse;
|
||||
|
||||
import org.apache.commons.cli.*;
|
||||
import org.apache.commons.lang.time.DateFormatUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
@@ -24,13 +29,10 @@ import org.dspace.statistics.SolrLogger;
|
||||
import org.elasticsearch.action.bulk.BulkRequestBuilder;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
|
||||
|
||||
import java.io.*;
|
||||
import java.text.DecimalFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
@@ -38,8 +40,6 @@ import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Random;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
|
||||
/**
|
||||
* Class to load intermediate statistics files (produced from log files by <code>ClassicDSpaceLogConverter</code>) into Elastic Search
|
||||
*
|
||||
@@ -56,7 +56,7 @@ public class StatisticsImporterElasticSearch {
|
||||
//TODO ES Client
|
||||
|
||||
/** GEOIP lookup service */
|
||||
private static LookupService geoipLookup;
|
||||
private static DatabaseReader geoipLookup;
|
||||
|
||||
/** Metadata storage information */
|
||||
private static Map<String, String> metadataStorageInfo;
|
||||
@@ -107,8 +107,8 @@ public class StatisticsImporterElasticSearch {
|
||||
String continent = "";
|
||||
String country = "";
|
||||
String countryCode = "";
|
||||
float longitude = 0f;
|
||||
float latitude = 0f;
|
||||
double longitude = 0f;
|
||||
double latitude = 0f;
|
||||
String city = "";
|
||||
String dns;
|
||||
|
||||
@@ -176,15 +176,15 @@ public class StatisticsImporterElasticSearch {
|
||||
}
|
||||
|
||||
// Get the geo information for the user
|
||||
Location location;
|
||||
try {
|
||||
location = geoipLookup.getLocation(ip);
|
||||
city = location.city;
|
||||
country = location.countryName;
|
||||
countryCode = location.countryCode;
|
||||
longitude = location.longitude;
|
||||
latitude = location.latitude;
|
||||
if(verbose) {
|
||||
InetAddress ipAddress = InetAddress.getByName(ip);
|
||||
CityResponse cityResponse = geoipLookup.city(ipAddress);
|
||||
city = cityResponse.getCity().getName();
|
||||
country = cityResponse.getCountry().getName();
|
||||
countryCode = cityResponse.getCountry().getIsoCode();
|
||||
longitude = cityResponse.getLocation().getLongitude();
|
||||
latitude = cityResponse.getLocation().getLatitude();
|
||||
if (verbose) {
|
||||
data += (", country = " + country);
|
||||
data += (", city = " + city);
|
||||
System.out.println(data);
|
||||
@@ -198,7 +198,7 @@ public class StatisticsImporterElasticSearch {
|
||||
}
|
||||
continue;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
} catch (GeoIp2Exception | IOException e) {
|
||||
// No problem - just can't look them up
|
||||
}
|
||||
|
||||
@@ -386,20 +386,22 @@ public class StatisticsImporterElasticSearch {
|
||||
// Verbose option
|
||||
boolean verbose = line.hasOption('v');
|
||||
|
||||
String dbfile = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
try
|
||||
{
|
||||
geoipLookup = new LookupService(dbfile, LookupService.GEOIP_STANDARD);
|
||||
String dbPath = ConfigurationManager.getProperty("usage-statistics", "dbfile");
|
||||
try {
|
||||
File dbFile = new File(dbPath);
|
||||
geoipLookup = new DatabaseReader.Builder(dbFile).build();
|
||||
} catch (FileNotFoundException fe) {
|
||||
log.error(
|
||||
"The GeoLite Database file is missing (" + dbPath + ")! ElasticSearch Statistics cannot generate location " +
|
||||
"based reports! Please see the DSpace installation instructions for instructions to install this " +
|
||||
"file.",
|
||||
fe);
|
||||
} catch (IOException e) {
|
||||
log.error(
|
||||
"Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the DSpace " +
|
||||
"installation instructions for more details.",
|
||||
e);
|
||||
}
|
||||
catch (FileNotFoundException fe)
|
||||
{
|
||||
log.error("The GeoLite Database file is missing (" + dbfile + ")! Elastic Search Statistics cannot generate location based reports! Please see the DSpace installation instructions for instructions to install this file.", fe);
|
||||
}
|
||||
catch (IOException e)
|
||||
{
|
||||
log.error("Unable to load GeoLite Database file (" + dbfile + ")! You may need to reinstall it. See the DSpace installation instructions for more details.", e);
|
||||
}
|
||||
|
||||
|
||||
StatisticsImporterElasticSearch elasticSearchImporter = new StatisticsImporterElasticSearch();
|
||||
if (line.hasOption('m'))
|
||||
|
@@ -7,39 +7,21 @@
|
||||
*/
|
||||
package org.dspace.storage.rdbms;
|
||||
|
||||
import java.io.*;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DatabaseMetaData;
|
||||
import java.sql.Date;
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.ResultSetMetaData;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.SQLWarning;
|
||||
import java.sql.Statement;
|
||||
import java.sql.Time;
|
||||
import java.sql.Timestamp;
|
||||
import java.sql.Types;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
import javax.naming.InitialContext;
|
||||
import javax.sql.DataSource;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.flywaydb.core.Flyway;
|
||||
import org.flywaydb.core.api.MigrationInfo;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.naming.InitialContext;
|
||||
import javax.sql.DataSource;
|
||||
import java.io.Serializable;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.sql.*;
|
||||
import java.sql.Date;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* Executes SQL queries.
|
||||
*
|
||||
@@ -343,7 +325,7 @@ public class DatabaseManager
|
||||
try
|
||||
{
|
||||
iterator = query(context, query, parameters);
|
||||
retRow = (!iterator.hasNext()) ? null : iterator.next();
|
||||
retRow = (!iterator.hasNext()) ? null : iterator.next(context);
|
||||
} catch (SQLException e) {
|
||||
log.error("SQL query single Error - ", e);
|
||||
throw e;
|
||||
@@ -389,7 +371,7 @@ public class DatabaseManager
|
||||
|
||||
try
|
||||
{
|
||||
retRow = (!iterator.hasNext()) ? null : iterator.next();
|
||||
retRow = (!iterator.hasNext()) ? null : iterator.next(context);
|
||||
} catch (SQLException e) {
|
||||
log.error("SQL query singleTable Error - ", e);
|
||||
throw e;
|
||||
@@ -475,7 +457,7 @@ public class DatabaseManager
|
||||
|
||||
{
|
||||
try {
|
||||
TableRow row = new TableRow(canonicalize(table), getColumnNames(table));
|
||||
TableRow row = new TableRow(canonicalize(table), getColumnNames(context,table));
|
||||
insert(context, row);
|
||||
return row;
|
||||
} catch (SQLException e) {
|
||||
@@ -505,7 +487,7 @@ public class DatabaseManager
|
||||
String ctable = canonicalize(table);
|
||||
|
||||
try {
|
||||
return findByUnique(context, ctable, getPrimaryKeyColumn(ctable),
|
||||
return findByUnique(context, ctable, getPrimaryKeyColumn(context, ctable),
|
||||
Integer.valueOf(id));
|
||||
} catch (SQLException e) {
|
||||
log.error("SQL find Error - ", e);
|
||||
@@ -571,7 +553,7 @@ public class DatabaseManager
|
||||
{
|
||||
try {
|
||||
String ctable = canonicalize(table);
|
||||
return deleteByValue(context, ctable, getPrimaryKeyColumn(ctable),
|
||||
return deleteByValue(context, ctable, getPrimaryKeyColumn(context, ctable),
|
||||
Integer.valueOf(id));
|
||||
} catch (SQLException e) {
|
||||
log.error("SQL delete Error - ", e);
|
||||
@@ -691,6 +673,10 @@ public class DatabaseManager
|
||||
* @return The newly created row
|
||||
* @throws SQLException
|
||||
*/
|
||||
public static TableRow row(Context context, String table) throws SQLException
|
||||
{
|
||||
return new TableRow(canonicalize(table), getColumnNames(context, table));
|
||||
}
|
||||
public static TableRow row(String table) throws SQLException
|
||||
{
|
||||
return new TableRow(canonicalize(table), getColumnNames(table));
|
||||
@@ -718,7 +704,7 @@ public class DatabaseManager
|
||||
newID = doInsertGeneric(context, row);
|
||||
}
|
||||
|
||||
row.setColumn(getPrimaryKeyColumn(row), newID);
|
||||
row.setColumn(getPrimaryKeyColumn(context, row), newID);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -741,8 +727,8 @@ public class DatabaseManager
|
||||
.append(" set ");
|
||||
|
||||
List<ColumnInfo> columns = new ArrayList<ColumnInfo>();
|
||||
ColumnInfo pk = getPrimaryKeyColumnInfo(table);
|
||||
Collection<ColumnInfo> info = getColumnInfo(table);
|
||||
ColumnInfo pk = getPrimaryKeyColumnInfo(context, table);
|
||||
Collection<ColumnInfo> info = getColumnInfo(context, table);
|
||||
|
||||
String separator = "";
|
||||
for (ColumnInfo col : info)
|
||||
@@ -789,7 +775,7 @@ public class DatabaseManager
|
||||
throw new IllegalArgumentException("Row not associated with a table");
|
||||
}
|
||||
|
||||
String pk = getPrimaryKeyColumn(row);
|
||||
String pk = getPrimaryKeyColumn(context, row);
|
||||
|
||||
if (row.isColumnNull(pk))
|
||||
{
|
||||
@@ -808,9 +794,9 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
static Collection<ColumnInfo> getColumnInfo(String table) throws SQLException
|
||||
static Collection<ColumnInfo> getColumnInfo(Context context, String table) throws SQLException
|
||||
{
|
||||
Map<String, ColumnInfo> cinfo = getColumnInfoInternal(table);
|
||||
Map<String, ColumnInfo> cinfo = getColumnInfoInternal(context, table);
|
||||
|
||||
return (cinfo == null) ? null : cinfo.values();
|
||||
}
|
||||
@@ -826,14 +812,18 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
static ColumnInfo getColumnInfo(String table, String column)
|
||||
static ColumnInfo getColumnInfo(Context context, String table, String column)
|
||||
throws SQLException
|
||||
{
|
||||
Map<String, ColumnInfo> info = getColumnInfoInternal(table);
|
||||
Map<String, ColumnInfo> info = getColumnInfoInternal(context, table);
|
||||
|
||||
return (info == null) ? null : info.get(column);
|
||||
}
|
||||
|
||||
static List<String> getColumnNames(String table) throws SQLException{
|
||||
return getColumnNames(null,table);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the names of all the columns of the given table.
|
||||
*
|
||||
@@ -844,10 +834,10 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
static List<String> getColumnNames(String table) throws SQLException
|
||||
static List<String> getColumnNames(Context context, String table) throws SQLException
|
||||
{
|
||||
List<String> results = new ArrayList<String>();
|
||||
Collection<ColumnInfo> info = getColumnInfo(table);
|
||||
Collection<ColumnInfo> info = getColumnInfo(context, table);
|
||||
|
||||
for (ColumnInfo col : info)
|
||||
{
|
||||
@@ -881,22 +871,22 @@ public class DatabaseManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the canonical name for a table.
|
||||
* Return the canonical name for a database object.
|
||||
*
|
||||
* @param table
|
||||
* The name of the table.
|
||||
* @return The canonical name of the table.
|
||||
* @param db_object
|
||||
* The name of the database object.
|
||||
* @return The canonical name of the database object.
|
||||
*/
|
||||
static String canonicalize(String table)
|
||||
static String canonicalize(String db_object)
|
||||
{
|
||||
// Oracle expects upper-case table names
|
||||
// Oracle expects upper-case table names, schemas, etc.
|
||||
if (isOracle)
|
||||
{
|
||||
return (table == null) ? null : table.toUpperCase();
|
||||
return (db_object == null) ? null : db_object.toUpperCase();
|
||||
}
|
||||
|
||||
// default database postgres wants lower-case table names
|
||||
return (table == null) ? null : table.toLowerCase();
|
||||
return (db_object == null) ? null : db_object.toLowerCase();
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
@@ -914,9 +904,21 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
static TableRow process(Context context, ResultSet results, String table) throws SQLException
|
||||
{
|
||||
return process(context,results, table, null);
|
||||
}
|
||||
static TableRow process(ResultSet results, String table) throws SQLException
|
||||
{
|
||||
return process(results, table, null);
|
||||
return process(null,results, table, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated You should try to pass an existing database connection to this method to prevent opening a new one.
|
||||
*/
|
||||
@Deprecated
|
||||
static TableRow process(ResultSet results, String table, List<String> pColumnNames) throws SQLException{
|
||||
return process(null,results,table,pColumnNames);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -932,14 +934,14 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
static TableRow process(ResultSet results, String table, List<String> pColumnNames) throws SQLException
|
||||
static TableRow process(Context context, ResultSet results, String table, List<String> pColumnNames) throws SQLException
|
||||
{
|
||||
ResultSetMetaData meta = results.getMetaData();
|
||||
int columns = meta.getColumnCount() + 1;
|
||||
|
||||
// If we haven't been passed the column names try to generate them from the metadata / table
|
||||
List<String> columnNames = pColumnNames != null ? pColumnNames :
|
||||
((table == null) ? getColumnNames(meta) : getColumnNames(table));
|
||||
((table == null) ? getColumnNames(meta) : getColumnNames(context,table));
|
||||
|
||||
TableRow row = new TableRow(canonicalize(table), columnNames);
|
||||
|
||||
@@ -1062,9 +1064,9 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
public static String getPrimaryKeyColumn(TableRow row) throws SQLException
|
||||
public static String getPrimaryKeyColumn(Context context,TableRow row) throws SQLException
|
||||
{
|
||||
return getPrimaryKeyColumn(row.getTable());
|
||||
return getPrimaryKeyColumn(context,row.getTable());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1079,10 +1081,10 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
protected static String getPrimaryKeyColumn(String table)
|
||||
protected static String getPrimaryKeyColumn(Context context, String table)
|
||||
throws SQLException
|
||||
{
|
||||
ColumnInfo info = getPrimaryKeyColumnInfo(table);
|
||||
ColumnInfo info = getPrimaryKeyColumnInfo(context, table);
|
||||
|
||||
return (info == null) ? null : info.getName();
|
||||
}
|
||||
@@ -1098,9 +1100,9 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
static ColumnInfo getPrimaryKeyColumnInfo(String table) throws SQLException
|
||||
static ColumnInfo getPrimaryKeyColumnInfo(Context context, String table) throws SQLException
|
||||
{
|
||||
Collection<ColumnInfo> cinfo = getColumnInfo(canonicalize(table));
|
||||
Collection<ColumnInfo> cinfo = getColumnInfo(context, canonicalize(table));
|
||||
|
||||
for (ColumnInfo info : cinfo)
|
||||
{
|
||||
@@ -1202,7 +1204,7 @@ public class DatabaseManager
|
||||
* @exception SQLException
|
||||
* If a database error occurs
|
||||
*/
|
||||
private static Map<String, ColumnInfo> getColumnInfoInternal(String table) throws SQLException
|
||||
private static Map<String, ColumnInfo> getColumnInfoInternal(Context context, String table) throws SQLException
|
||||
{
|
||||
String ctable = canonicalize(table);
|
||||
Map<String, ColumnInfo> results = info.get(ctable);
|
||||
@@ -1212,7 +1214,7 @@ public class DatabaseManager
|
||||
return results;
|
||||
}
|
||||
|
||||
results = retrieveColumnInfo(ctable);
|
||||
results = retrieveColumnInfo(context, ctable);
|
||||
info.put(ctable, results);
|
||||
|
||||
return results;
|
||||
@@ -1229,7 +1231,7 @@ public class DatabaseManager
|
||||
* If there is a problem retrieving information from the
|
||||
* RDBMS.
|
||||
*/
|
||||
private static Map<String, ColumnInfo> retrieveColumnInfo(String table) throws SQLException
|
||||
private static Map<String, ColumnInfo> retrieveColumnInfo(Context context, String table) throws SQLException
|
||||
{
|
||||
Connection connection = null;
|
||||
ResultSet pkcolumns = null;
|
||||
@@ -1237,10 +1239,6 @@ public class DatabaseManager
|
||||
|
||||
try
|
||||
{
|
||||
String schema = ConfigurationManager.getProperty("db.schema");
|
||||
if(StringUtils.isBlank(schema)){
|
||||
schema = null;
|
||||
}
|
||||
String catalog = null;
|
||||
|
||||
int dotIndex = table.indexOf('.');
|
||||
@@ -1252,8 +1250,15 @@ public class DatabaseManager
|
||||
log.warn("table: " + table);
|
||||
}
|
||||
|
||||
connection = getConnection();
|
||||
if (context != null && !context.getDBConnection().isClosed()) {
|
||||
connection = context.getDBConnection();
|
||||
} else {
|
||||
connection = getConnection();
|
||||
}
|
||||
|
||||
// Get current database schema name
|
||||
String schema = DatabaseUtils.getSchemaName(connection);
|
||||
|
||||
DatabaseMetaData metadata = connection.getMetaData();
|
||||
Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>();
|
||||
|
||||
@@ -1301,7 +1306,7 @@ public class DatabaseManager
|
||||
try { columns.close(); } catch (SQLException sqle) { }
|
||||
}
|
||||
|
||||
if (connection != null)
|
||||
if (connection != null && context == null) // Only close if connection is newly created in this method
|
||||
{
|
||||
try { connection.close(); } catch (SQLException sqle) { }
|
||||
}
|
||||
@@ -1674,10 +1679,10 @@ public class DatabaseManager
|
||||
{
|
||||
String table = row.getTable();
|
||||
|
||||
Collection<ColumnInfo> info = getColumnInfo(table);
|
||||
Collection<ColumnInfo> info = getColumnInfo(context, table);
|
||||
Collection<ColumnInfo> params = new ArrayList<ColumnInfo>();
|
||||
|
||||
String primaryKey = getPrimaryKeyColumn(table);
|
||||
String primaryKey = getPrimaryKeyColumn(context, table);
|
||||
String sql = insertSQL.get(table);
|
||||
|
||||
boolean firstColumn = true;
|
||||
@@ -1713,7 +1718,7 @@ public class DatabaseManager
|
||||
}
|
||||
}
|
||||
|
||||
sql = insertBuilder.append(valuesBuilder.toString()).append(") RETURNING ").append(getPrimaryKeyColumn(table)).toString();
|
||||
sql = insertBuilder.append(valuesBuilder.toString()).append(") RETURNING ").append(getPrimaryKeyColumn(context, table)).toString();
|
||||
insertSQL.put(table, sql);
|
||||
}
|
||||
else
|
||||
@@ -1828,8 +1833,8 @@ public class DatabaseManager
|
||||
}
|
||||
|
||||
// Set the ID in the table row object
|
||||
row.setColumn(getPrimaryKeyColumn(table), newID);
|
||||
Collection<ColumnInfo> info = getColumnInfo(table);
|
||||
row.setColumn(getPrimaryKeyColumn(context, table), newID);
|
||||
Collection<ColumnInfo> info = getColumnInfo(context, table);
|
||||
|
||||
String sql = insertSQL.get(table);
|
||||
if (sql == null)
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user