Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 01:54:22 +00:00)

Compare commits: 489 commits, 6ac823d29a...dspace-6.3
.gitignore (vendored)
@@ -6,6 +6,7 @@ tags

## Ignore project files created by Eclipse
.settings/
/bin/
.project
.classpath
.travis.yml

@@ -7,6 +7,8 @@ env:

# Install prerequisites for building Mirage2 more rapidly
before_install:
  # Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
  - rm ~/.m2/settings.xml
  # Install Node.js 6.5.0 & print version info
  - nvm install 6.5.0
  - node --version

@@ -21,10 +23,10 @@ before_install:

  # Print ruby version info (should be installed)
  - ruby -v
  # Install Sass & print version info
  - gem install sass
  - gem install sass -v 3.3.14
  - sass -v
  # Install Compass & print version info
  - gem install compass
  - gem install compass -v 1.0.1
  - compass version

# Skip install stage, as we'll do it below
LICENSE
@@ -1,7 +1,7 @@

DSpace source code license:

Copyright (c) 2002-2016, DuraSpace. All rights reserved.
Copyright (c) 2002-2018, DuraSpace. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
@@ -27,25 +27,22 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.10.50 - https://aws.amazon.com/sdkforjava)
|
||||
* HPPC Collections (com.carrotsearch:hppc:0.5.2 - http://labs.carrotsearch.com/hppc.html/hppc)
|
||||
* metadata-extractor (com.drewnoakes:metadata-extractor:2.6.2 - http://code.google.com/p/metadata-extractor/)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.5.4 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.7.0 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.5.4 - https://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.7.0 - https://github.com/FasterXML/jackson-core)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.5.4 - http://github.com/FasterXML/jackson)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.7.0 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-JAXRS-base (com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.5.4 - http://wiki.fasterxml.com/JacksonHome/jackson-jaxrs-base)
|
||||
* Jackson-JAXRS-JSON (com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.5.4 - http://wiki.fasterxml.com/JacksonHome/jackson-jaxrs-json-provider)
|
||||
* Jackson-module-JAXB-annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.5.4 - http://wiki.fasterxml.com/JacksonJAXBAnnotations)
|
||||
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.21.0 - https://github.com/google/google-api-java-client/google-api-client)
|
||||
* Google Analytics API v3-rev123-1.21.0 (com.google.apis:google-api-services-analytics:v3-rev123-1.21.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.8.11 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.8.11 - https://github.com/FasterXML/jackson-core)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.8.11.1 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-JAXRS-base (com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.8.11 - http://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-base)
|
||||
* Jackson-JAXRS-JSON (com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.8.11 - http://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-json-provider)
|
||||
* Jackson module: JAXB-annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.8.11 - http://github.com/FasterXML/jackson-module-jaxb-annotations)
|
||||
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client)
|
||||
* Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
|
||||
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/)
|
||||
* Gson (com.google.code.gson:gson:2.6.1 - https://github.com/google/gson/gson)
|
||||
* Guava: Google Core Libraries for Java (com.google.guava:guava:14.0.1 - http://code.google.com/p/guava-libraries/guava)
|
||||
* Guava: Google Core Libraries for Java (com.google.guava:guava:19.0 - https://github.com/google/guava/guava)
|
||||
* Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
|
||||
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.21.0 - https://github.com/google/google-http-java-client/google-http-client)
|
||||
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.21.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
|
||||
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.21.0 - https://github.com/google/google-oauth-java-client/google-oauth-client)
|
||||
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client)
|
||||
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
|
||||
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.23.0 - https://github.com/google/google-oauth-java-client/google-oauth-client)
|
||||
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.2 - http://code.google.com/p/concurrentlinkedhashmap)
|
||||
* ISO Parser (com.googlecode.mp4parser:isoparser:1.0-RC-1 - http://code.google.com/p/mp4parser/)
|
||||
* builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons)
|
||||
@@ -53,6 +50,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Jtwig Core Functions (com.lyncode:jtwig-functions:2.0.1 - http://www.lyncode.com/jtwig-functions)
|
||||
* Jtwig Spring (com.lyncode:jtwig-spring:2.0.1 - http://www.lyncode.com/jtwig-spring)
|
||||
* Test Support (com.lyncode:test-support:1.0.3 - http://nexus.sonatype.org/oss-repository-hosting.html/test-support)
|
||||
* MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/)
|
||||
* MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.11.0 - http://dev.maxmind.com/geoip/geoip2/web-services)
|
||||
* Spatial4J (com.spatial4j:spatial4j:0.4.1 - https://github.com/spatial4j/spatial4j)
|
||||
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.2 - http://commons.apache.org/proper/commons-beanutils/)
|
||||
* Apache Commons CLI (commons-cli:commons-cli:1.3.1 - http://commons.apache.org/proper/commons-cli/)
|
||||
@@ -60,7 +59,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
|
||||
* Apache Commons Configuration (commons-configuration:commons-configuration:1.10 - http://commons.apache.org/configuration/)
|
||||
* Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/)
|
||||
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.1 - http://commons.apache.org/proper/commons-fileupload/)
|
||||
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/)
|
||||
* HttpClient (commons-httpclient:commons-httpclient:3.1 - http://jakarta.apache.org/httpcomponents/httpclient-3.x/)
|
||||
* Commons IO (commons-io:commons-io:2.4 - http://commons.apache.org/io/)
|
||||
* commons-jexl (commons-jexl:commons-jexl:1.0 - no url defined)
|
||||
@@ -69,7 +68,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/)
|
||||
* Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/)
|
||||
* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
|
||||
* The Netty Project (io.netty:netty:3.7.0.Final - http://netty.io/)
|
||||
* jakarta-regexp (jakarta-regexp:jakarta-regexp:1.4 - no url defined)
|
||||
* javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/)
|
||||
* Bean Validation API (javax.validation:validation-api:1.1.0.Final - http://beanvalidation.org)
|
||||
@@ -84,8 +82,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core)
|
||||
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
|
||||
* Abdera Parser (org.apache.abdera:abdera-parser:1.1.3 - http://abdera.apache.org/abdera-parser)
|
||||
* org.apache.tools.ant (org.apache.ant:ant:1.7.0 - http://ant.apache.org/ant/)
|
||||
* ant-launcher (org.apache.ant:ant-launcher:1.7.0 - http://ant.apache.org/ant-launcher/)
|
||||
* Apache Ant Core (org.apache.ant:ant:1.9.1 - http://ant.apache.org/)
|
||||
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.9.1 - http://ant.apache.org/)
|
||||
* Avalon Framework API (org.apache.avalon.framework:avalon-framework-api:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-api/)
|
||||
* Avalon Framework Implementation (org.apache.avalon.framework:avalon-framework-impl:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-impl/)
|
||||
* Cocoon Configuration API (org.apache.cocoon:cocoon-configuration-api:1.0.2 - http://cocoon.apache.org/subprojects/configuration/1.0/configuration-api/1.0/)
|
||||
@@ -111,6 +109,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Cocoon XML Implementation (org.apache.cocoon:cocoon-xml-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-impl/1.0/)
|
||||
* Cocoon XML Resolver (org.apache.cocoon:cocoon-xml-resolver:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-resolver/1.0/)
|
||||
* Cocoon XML Utilities (org.apache.cocoon:cocoon-xml-util:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-util/1.0/)
|
||||
* Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/)
|
||||
* Apache Commons Compress (org.apache.commons:commons-compress:1.7 - http://commons.apache.org/proper/commons-compress/)
|
||||
* Apache Commons CSV (org.apache.commons:commons-csv:1.0 - http://commons.apache.org/proper/commons-csv/)
|
||||
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.1.1 - http://commons.apache.org/dbcp/)
|
||||
@@ -174,11 +173,11 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.2 - http://pdfbox.apache.org/)
|
||||
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.4 - http://www.apache.org/pdfbox-parent/jempbox/)
|
||||
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.2 - http://www.apache.org/pdfbox-parent/pdfbox/)
|
||||
* Apache POI (org.apache.poi:poi:3.13 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-ooxml:3.13 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi:3.17 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-ooxml:3.17 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.10.1 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.13 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-scratchpad:3.13 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.17 - http://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-scratchpad:3.17 - http://poi.apache.org/)
|
||||
* Apache Solr Search Server (org.apache.solr:solr:4.10.4 - http://lucene.apache.org/solr-parent/solr)
|
||||
* Apache Solr Analysis Extras (org.apache.solr:solr-analysis-extras:4.10.4 - http://lucene.apache.org/solr-parent/solr-analysis-extras)
|
||||
* Apache Solr Content Extraction Library (org.apache.solr:solr-cell:4.10.4 - http://lucene.apache.org/solr-parent/solr-cell)
|
||||
@@ -191,6 +190,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.14 - http://ws.apache.org/axiom/)
|
||||
* Axiom Impl (org.apache.ws.commons.axiom:axiom-impl:1.2.14 - http://ws.apache.org/axiom/)
|
||||
* XmlBeans (org.apache.xmlbeans:xmlbeans:2.6.0 - http://xmlbeans.apache.org)
|
||||
* Apache Yetus - Audience Annotations (org.apache.yetus:audience-annotations:0.5.0 - https://yetus.apache.org/audience-annotations)
|
||||
* zookeeper (org.apache.zookeeper:zookeeper:3.4.11 - no url defined)
|
||||
* zookeeper (org.apache.zookeeper:zookeeper:3.4.6 - no url defined)
|
||||
* Evo Inflector (org.atteo:evo-inflector:1.2.1 - http://atteo.org/static/evo-inflector)
|
||||
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
|
||||
@@ -251,8 +252,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* oai4j (se.kb:oai4j:0.6b1 - http://oai4j-client.sourceforge.net/)
|
||||
* StAX API (stax:stax-api:1.0.1 - http://stax.codehaus.org/)
|
||||
* standard (taglibs:standard:1.1.2 - no url defined)
|
||||
* Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
|
||||
* Xalan Java (xalan:xalan:2.7.2 - http://xml.apache.org/xalan-j/)
|
||||
* xalan (xalan:xalan:2.7.0 - no url defined)
|
||||
* Xerces2-j (xerces:xercesImpl:2.11.0 - https://xerces.apache.org/xerces2-j/)
|
||||
* xmlParserAPIs (xerces:xmlParserAPIs:2.6.2 - no url defined)
|
||||
* XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/)
|
||||
@@ -265,6 +265,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* XMP Library for Java (com.adobe.xmp:xmpcore:5.1.2 - http://www.adobe.com/devnet/xmp.html)
|
||||
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
|
||||
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
|
||||
* curvesapi (com.github.virtuald:curvesapi:1.04 - https://github.com/virtuald/curvesapi)
|
||||
* Protocol Buffer Java API (com.google.protobuf:protobuf-java:2.5.0 - http://code.google.com/p/protobuf)
|
||||
* Jena IRI (com.hp.hpl.jena:iri:0.8 - http://jena.sf.net/iri)
|
||||
* Jena (com.hp.hpl.jena:jena:2.6.4 - http://www.openjena.org/)
|
||||
@@ -274,31 +275,30 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Biblio Transformation Engine :: Core (gr.ekt.bte:bte-core:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-core)
|
||||
* Biblio Transformation Engine :: Input/Output (gr.ekt.bte:bte-io:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-io)
|
||||
* jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/)
|
||||
* JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
|
||||
* ANTLR 3 Runtime (org.antlr:antlr-runtime:3.5 - http://www.antlr.org)
|
||||
* Morfologik FSA (org.carrot2:morfologik-fsa:1.7.1 - http://morfologik.blogspot.com/morfologik-fsa/)
|
||||
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
|
||||
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
|
||||
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.1 - http://woodstox.codehaus.org/StAX2)
|
||||
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
|
||||
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.3 - https://github.com/dspace/dspace-api-lang)
|
||||
* DSpace JSP-UI (org.dspace:dspace-jspui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
|
||||
* DSpace OAI-PMH (org.dspace:dspace-oai:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
|
||||
* DSpace RDF (org.dspace:dspace-rdf:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
|
||||
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.0-rc4-SNAPSHOT - http://demo.dspace.org)
|
||||
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
|
||||
* Apache Solr Webapp (org.dspace:dspace-solr:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
|
||||
* DSpace SWORD (org.dspace:dspace-sword:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
|
||||
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
|
||||
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
|
||||
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.3 - https://github.com/dspace/dspace-xmlui-lang)
|
||||
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
|
||||
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.5 - https://github.com/dspace/dspace-api-lang)
|
||||
* DSpace JSP-UI (org.dspace:dspace-jspui:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
|
||||
* DSpace OAI-PMH (org.dspace:dspace-oai:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
|
||||
* DSpace RDF (org.dspace:dspace-rdf:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
|
||||
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.3-SNAPSHOT - http://demo.dspace.org)
|
||||
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
|
||||
* Apache Solr Webapp (org.dspace:dspace-solr:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
|
||||
* DSpace SWORD (org.dspace:dspace-sword:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
|
||||
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
|
||||
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
|
||||
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.6 - https://github.com/dspace/dspace-xmlui-lang)
|
||||
* handle (org.dspace:handle:6.2 - no url defined)
|
||||
* jargon (org.dspace:jargon:1.4.25 - no url defined)
|
||||
* mets (org.dspace:mets:1.5.2 - no url defined)
|
||||
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
|
||||
* XOAI : OAI-PMH Java Toolkit (org.dspace:xoai:3.2.10 - http://nexus.sonatype.org/oss-repository-hosting.html/xoai)
|
||||
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
|
||||
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
|
||||
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.3-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
|
||||
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
|
||||
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
|
||||
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
|
||||
@@ -310,6 +310,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* XMLUnit for Java (xmlunit:xmlunit:1.1 - http://xmlunit.sourceforge.net/)
|
||||
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
|
||||
|
||||
BSD-Style License:
|
||||
|
||||
* JAXB2 Basics - Runtime (org.jvnet.jaxb2_commons:jaxb2-basics-runtime:0.9.5 - https://github.com/highsource/jaxb2-basics/jaxb2-basics-runtime)
|
||||
|
||||
Common Development and Distribution License (CDDL):
|
||||
|
||||
* JAXB Reference Implementation (com.sun.xml.bind:jaxb-impl:2.2.5 - http://jaxb.java.net/)
|
||||
@@ -366,7 +370,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
GNU Lesser General Public License (LGPL):
|
||||
|
||||
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
|
||||
* MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
|
||||
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
|
||||
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
|
||||
* A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
|
||||
@@ -391,6 +394,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
|
||||
* Bouncy Castle CMS and S/MIME API (org.bouncycastle:bcmail-jdk15:1.46 - http://www.bouncycastle.org/java.html)
|
||||
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15:1.46 - http://www.bouncycastle.org/java.html)
|
||||
* ORCID Java API generated via JAXB (org.dspace:orcid-jaxb-api:2.1.0 - https://github.com/DSpace/orcid-jaxb-api)
|
||||
* Main (org.jmockit:jmockit:1.21 - http://www.jmockit.org)
|
||||
* OpenCloud (org.mcavallo:opencloud:0.3 - http://opencloud.mcavallo.org/)
|
||||
* Mockito (org.mockito:mockito-core:1.10.19 - http://www.mockito.org)
|
||||
@@ -406,6 +410,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines

* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* Rhino (rhino:js:1.6R7 - http://www.mozilla.org/rhino/)

The PostgreSQL License:

* PostgreSQL JDBC Driver - JDBC 4.2 (org.postgresql:postgresql:42.2.1 - https://github.com/pgjdbc/pgjdbc)

Public Domain:

* AOP alliance (aopalliance:aopalliance:1.0 - http://aopalliance.sourceforge.net)

@@ -417,9 +425,9 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines

* JDOM (org.jdom:jdom:1.1.3 - http://www.jdom.org)

The PostgreSQL License:
The JSON License:

* PostgreSQL JDBC Driver - JDBC 4.2 (org.postgresql:postgresql:9.4.1211 - https://github.com/pgjdbc/pgjdbc)
* JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java)

license.txt:
pom.xml

@@ -12,7 +12,7 @@

<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>6.1-SNAPSHOT</version>
<version>6.3</version>
<relativePath>..</relativePath>
</parent>

@@ -266,6 +266,9 @@

<include>**/*.xsl</include>
<include>**/*.xmap</include>
</includes>
<excludes>
<exclude>**/node/node_modules/**</exclude>
</excludes>
</validationSet>
</validationSets>
</configuration>

@@ -331,16 +334,6 @@

<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-cli</groupId>

@@ -505,6 +498,11 @@

<artifactId>contiperf</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.rometools</groupId>
<artifactId>rome-modules</artifactId>

@@ -568,9 +566,9 @@

<artifactId>commons-configuration</artifactId>
</dependency>
<dependency>
<groupId>com.maxmind.geoip</groupId>
<artifactId>geoip-api</artifactId>
<version>1.3.0</version>
<groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId>
<version>2.11.0</version>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>

@@ -696,7 +694,7 @@

<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.22.1</version>
<version>${jersey.version}</version>
</dependency>
<!-- S3 -->
<dependency>

@@ -708,27 +706,19 @@

<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- S3 also wanted jackson... -->

<!-- For ORCID v2 integration -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.7.0</version>
<groupId>org.dspace</groupId>
<artifactId>orcid-jaxb-api</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.7.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.7.0</version>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20180130</version>
</dependency>
</dependencies>
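The geoip-api to geoip2 swap above supports resolving client IP addresses to locations against a GeoLite2 database (used by DSpace's usage statistics). A minimal sketch of a lookup with the com.maxmind.geoip2 2.x API follows; the database path and the wrapper class are illustrative assumptions, not code from this changeset:

```java
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;

import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.maxmind.geoip2.model.CityResponse;

public class GeoIp2LookupSketch {
    public static void main(String[] args) throws IOException, GeoIp2Exception {
        // Hypothetical database location; DSpace reads the real path from its
        // usage-statistics configuration.
        File database = new File("/var/lib/GeoIP/GeoLite2-City.mmdb");

        // DatabaseReader is the geoip2 replacement for the old LookupService
        // from com.maxmind.geoip:geoip-api.
        try (DatabaseReader reader = new DatabaseReader.Builder(database).build()) {
            CityResponse response = reader.city(InetAddress.getByName("93.184.216.34"));
            System.out.println(response.getCountry().getIsoCode()
                    + " / " + response.getCity().getName());
        }
    }
}
```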
DSpaceCSV.java

@@ -196,7 +196,7 @@ public class DSpaceCSV implements Serializable

StringBuilder lineBuilder = new StringBuilder();
String lineRead;

while (StringUtils.isNotBlank(lineRead = input.readLine()))
while ((lineRead = input.readLine()) != null)
{
if (lineBuilder.length() > 0) {
// Already have a previously read value - add this line
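The DSpaceCSV change above replaces a loop that stopped at the first blank line with one that reads to end-of-stream, so an empty line in the middle of a CSV no longer silently truncates parsing. A self-contained sketch of the corrected loop shape (the in-memory reader and sample data are only for illustration; the real class does further per-line handling):

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class ReadAllLinesSketch {
    public static void main(String[] args) throws IOException {
        String csv = "id,collection,dc.title\n\n1,123456789/2,\"A title\"\n";
        try (BufferedReader input = new BufferedReader(new StringReader(csv))) {
            StringBuilder lineBuilder = new StringBuilder();
            String lineRead;
            // Stop only at end-of-stream; a blank line is still a valid line.
            while ((lineRead = input.readLine()) != null) {
                if (lineBuilder.length() > 0) {
                    lineBuilder.append("\n");
                }
                lineBuilder.append(lineRead);
            }
            System.out.println(lineBuilder);
        }
    }
}
```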
MetadataExport.java

@@ -34,6 +34,8 @@ public class MetadataExport

protected ItemService itemService;

protected Context context;

/** Whether to export all metadata, or just normally edited metadata */
protected boolean exportAll;

@@ -55,6 +57,7 @@ public class MetadataExport

// Store the export settings
this.toExport = toExport;
this.exportAll = exportAll;
this.context = c;
}

/**

@@ -73,6 +76,7 @@ public class MetadataExport

// Try to export the community
this.toExport = buildFromCommunity(c, toExport, 0);
this.exportAll = exportAll;
this.context = c;
}
catch (SQLException sqle)
{

@@ -144,13 +148,19 @@ public class MetadataExport

{
try
{
Context.Mode originalMode = context.getCurrentMode();
context.setMode(Context.Mode.READ_ONLY);

// Process each item
DSpaceCSV csv = new DSpaceCSV(exportAll);
while (toExport.hasNext())
{
csv.addItem(toExport.next());
Item item = toExport.next();
csv.addItem(item);
context.uncacheEntity(item);
}

context.setMode(originalMode);
// Return the results
return csv;
}

@@ -224,7 +234,7 @@ public class MetadataExport

String filename = line.getOptionValue('f');

// Create a context
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();

// The things we'll export
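Taken together, the MetadataExport hunks put the whole export under Context.Mode.READ_ONLY and evict each Item from the context cache as soon as it has been written to the CSV, which keeps memory flat on large exports. A condensed sketch of that pattern against the DSpace 6.x API (the method wrapper and exception handling are simplifications):

```java
import java.util.Iterator;

import org.dspace.app.bulkedit.DSpaceCSV;
import org.dspace.content.Item;
import org.dspace.core.Context;

public class ReadOnlyExportSketch {
    // Sketch: iterate items in read-only mode and uncache each one after use.
    public DSpaceCSV export(Context context, Iterator<Item> toExport, boolean exportAll)
            throws Exception {
        Context.Mode originalMode = context.getCurrentMode();
        context.setMode(Context.Mode.READ_ONLY);    // no writes expected while exporting

        DSpaceCSV csv = new DSpaceCSV(exportAll);
        while (toExport.hasNext()) {
            Item item = toExport.next();
            csv.addItem(item);
            context.uncacheEntity(item);            // drop the item from the session cache
        }

        context.setMode(originalMode);              // restore the caller's mode
        return csv;
    }
}
```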
MetadataImport.java

@@ -31,6 +31,7 @@ import org.dspace.eperson.EPerson;

import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;

@@ -122,6 +123,9 @@ public class MetadataImport

// Make the changes
try
{
Context.Mode originalMode = c.getCurrentMode();
c.setMode(Context.Mode.BATCH_EDIT);

// Process each change
for (DSpaceCSVLine line : toImport)
{

@@ -134,11 +138,15 @@ public class MetadataImport

throw new MetadataImportException("'action' not allowed for new items!");
}

WorkspaceItem wsItem = null;
WorkflowItem wfItem = null;
Item item = null;

// Is this a new item?
if (id != null)
{
// Get the item
Item item = itemService.find(c, id);
item = itemService.find(c, id);
if (item == null)
{
throw new MetadataImportException("Unknown item ID " + id);

@@ -345,8 +353,8 @@ public class MetadataImport

// Create the item
String collectionHandle = line.get("collection").get(0);
collection = (Collection) handleService.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = workspaceItemService.create(c, collection, useTemplate);
Item item = wsItem.getItem();
wsItem = workspaceItemService.create(c, collection, useTemplate);
item = wsItem.getItem();

// Add the metadata to the item
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds())

@@ -364,9 +372,9 @@ public class MetadataImport

if(useWorkflow){
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
if (workflowNotify) {
workflowService.start(c, wsItem);
wfItem = workflowService.start(c, wsItem);
} else {
workflowService.startWithoutNotify(c, wsItem);
wfItem = workflowService.startWithoutNotify(c, wsItem);
}
}
else

@@ -394,7 +402,16 @@ public class MetadataImport

// Record the changes
changes.add(whatHasChanged);
}

if (change) {
//only clear cache if changes have been made.
c.uncacheEntity(wsItem);
c.uncacheEntity(wfItem);
c.uncacheEntity(item);
}
}

c.setMode(originalMode);
}
catch (MetadataImportException mie)
{
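For rows that create new items, MetadataImport now keeps references to the WorkspaceItem, the WorkflowItem returned by the workflow service, and the Item itself, and evicts all three once the row has been applied (only when something actually changed). A reduced sketch of that creation path; the metadata-copying step is elided and the wrapper class is illustrative:

```java
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;

public class ImportRowSketch {
    // Sketch: create one item from a CSV row, start the workflow, then uncache.
    public void importRow(Context c, Collection collection, boolean useTemplate, boolean notify)
            throws Exception {
        WorkspaceItem wsItem = ContentServiceFactory.getInstance()
                .getWorkspaceItemService().create(c, collection, useTemplate);
        Item item = wsItem.getItem();

        // ... the real code copies the row's metadata onto `item` here ...

        WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
        WorkflowItem wfItem = notify
                ? workflowService.start(c, wsItem)                 // returns the new workflow item
                : workflowService.startWithoutNotify(c, wsItem);

        // Evict the intermediaries so a long CSV does not fill the session cache.
        c.uncacheEntity(wsItem);
        c.uncacheEntity(wfItem);
        c.uncacheEntity(item);
    }
}
```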
MetadataImportInvalidHeadingException.java

@@ -91,16 +91,16 @@ public class MetadataImportInvalidHeadingException extends Exception

{
if (type == SCHEMA)
{
return "Unknown metadata schema in row " + column + ": " + badHeading;
return "Unknown metadata schema in column " + column + ": " + badHeading;
} else if (type == ELEMENT)
{
return "Unknown metadata element in row " + column + ": " + badHeading;
return "Unknown metadata element in column " + column + ": " + badHeading;
} else if (type == MISSING)
{
return "Row with missing header: Row " + column;
return "Row with missing header: column " + column;
} else
{
return "Bad metadata declaration in row " + column + ": " + badHeading;
return "Bad metadata declaration in column" + column + ": " + badHeading;
}
}
}
}
Harvest.java

@@ -7,36 +7,32 @@

*/
package org.dspace.app.harvest;

import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.content.Item;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;

/**
* Test class for harvested collections.
*

@@ -91,7 +87,7 @@ public class Harvest

HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Harvest\n", options);
System.out
.println("\nPING OAI server: Harvest -g -s oai_source -i oai_set_id");
.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out
.println("RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format");
System.out

@@ -162,7 +158,7 @@ public class Harvest

// Instantiate our class
Harvest harvester = new Harvest();
harvester.context = new Context();
harvester.context = new Context(Context.Mode.BATCH_EDIT);

// Check our options

@@ -375,6 +371,8 @@ public class Harvest

Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
collectionService.removeItem(context, collection, item);
context.uncacheEntity(item);

// Dispatch events every 50 items
if (i%50 == 0) {
context.dispatchEvents();
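The purge path in Harvest now runs under a BATCH_EDIT context, uncaches each deleted item, and dispatches queued events every 50 deletions rather than only at the end. A compact sketch of that loop with the DSpace 6.x services (the wrapper class is illustrative; the batch size of 50 mirrors the diff):

```java
import java.util.Iterator;

import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Context;

public class PurgeCollectionSketch {
    // Sketch: delete every item of a harvested collection in manageable batches.
    public void purge(Context context, Collection collection,
                      CollectionService collectionService, Iterator<Item> it) throws Exception {
        // The caller is expected to have opened the context in Context.Mode.BATCH_EDIT.
        int i = 0;
        while (it.hasNext()) {
            i++;
            Item item = it.next();
            System.out.println("Deleting: " + item.getHandle());
            collectionService.removeItem(context, collection, item);
            context.uncacheEntity(item);      // keep the Hibernate session small
            if (i % 50 == 0) {
                context.dispatchEvents();     // flush events periodically, not just at the end
            }
        }
        context.dispatchEvents();
    }
}
```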
ItemExportCLITool.java

@@ -176,7 +176,7 @@ public class ItemExportCLITool {

System.exit(1);
}

Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();

if (myType == Constants.ITEM)
ItemExportServiceImpl.java

@@ -7,44 +7,28 @@

*/
package org.dspace.app.itemexport;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import javax.mail.MessagingException;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;

import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.core.Email;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;

import javax.mail.MessagingException;
import java.io.*;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see

@@ -129,7 +113,9 @@ public class ItemExportServiceImpl implements ItemExportService

}

System.out.println("Exporting item to " + mySequenceNumber);
exportItem(c, i.next(), fullPath, mySequenceNumber, migrate, excludeBitstreams);
Item item = i.next();
exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams);
c.uncacheEntity(item);
mySequenceNumber++;
}
}

@@ -282,7 +268,7 @@ public class ItemExportServiceImpl implements ItemExportService

("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
("date".equals(metadataField.getElement()) && "available".equals(qualifier)) ||
("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) &&
(dcv.getValue() != null && dcv.getValue().startsWith("http://hdl.handle.net/" +
(dcv.getValue() != null && dcv.getValue().startsWith(handleService.getCanonicalPrefix() +
handleService.getPrefix() + "/"))) ||
("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) ||
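Instead of hard-coding http://hdl.handle.net/, the exporter now builds the prefix to strip from HandleService, so repositories with a custom canonical handle prefix are treated the same way. A small sketch of the resulting check (the helper class is illustrative; in ItemExportServiceImpl the service is injected):

```java
import org.dspace.handle.service.HandleService;

public class LocalHandleCheckSketch {
    // Sketch: does a dc.identifier.uri value point at a handle minted by this repository?
    public boolean isLocalHandleUri(HandleService handleService, String value) {
        if (value == null) {
            return false;
        }
        // getCanonicalPrefix() reflects the configured canonical prefix
        // (http://hdl.handle.net/ by default), getPrefix() the repository's
        // own handle prefix, e.g. "123456789".
        String localPrefix = handleService.getCanonicalPrefix() + handleService.getPrefix() + "/";
        return value.startsWith(localPrefix);
    }
}
```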
ItemImportCLITool.java

@@ -189,7 +189,7 @@ public class ItemImportCLITool {

String zipfilename = "";
if (line.hasOption('z')) {
zip = true;
zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
zipfilename = line.getOptionValue('z');
}

//By default assume collections will be given on the command line

@@ -294,7 +294,7 @@ public class ItemImportCLITool {

myloader.setQuiet(isQuiet);

// create a context
Context c = new Context();
Context c = new Context(Context.Mode.BATCH_EDIT);

// find the EPerson, assign to context
EPerson myEPerson = null;
ItemImportServiceImpl.java

@@ -14,21 +14,6 @@ import gr.ekt.bte.core.TransformationSpec;

import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bteio.generators.DSpaceOutputGenerator;
import gr.ekt.bteio.loaders.OAIPMHDataLoader;

import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipFile;
import java.util.zip.ZipEntry;

import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;

import org.apache.commons.collections.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
import org.apache.commons.io.FileUtils;

@@ -46,18 +31,14 @@ import org.dspace.authorize.service.ResourcePolicyService;

import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.*;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.handle.service.HandleService;
import org.dspace.utils.DSpace;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;

@@ -67,6 +48,19 @@ import org.w3c.dom.Node;

import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

/**
* Import items into DSpace. The conventional use is upload files by copying

@@ -341,7 +335,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

{
clist = mycollections;
}
addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
c.uncacheEntity(item);
System.out.println(i + " " + dircontents[i]);
}
}

@@ -414,7 +409,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

handleOut.close();

deleteItem(c, oldItem);
addItem(c, mycollections, sourceDir, newItemName, null, template);
Item newItem = addItem(c, mycollections, sourceDir, newItemName, null, template);
c.uncacheEntity(oldItem);
c.uncacheEntity(newItem);
}
}

@@ -445,6 +442,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

Item myitem = itemService.findByIdOrLegacyId(c, itemID);
System.out.println("Deleting item " + itemID);
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}
}

@@ -470,6 +468,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

// create workspace item
Item myitem = null;
WorkspaceItem wi = null;
WorkflowItem wfi = null;

if (!isTest)
{

@@ -495,9 +494,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

{
// Should we send a workflow alert email or not?
if (useWorkflowSendEmail) {
workflowService.start(c, wi);
wfi = workflowService.start(c, wi);
} else {
workflowService.startWithoutNotify(c, wi);
wfi = workflowService.startWithoutNotify(c, wi);
}

// send ID to the mapfile

@@ -553,6 +552,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

mapOut.println(mapOutputString);
}

//Clear intermediary objects from the cache
c.uncacheEntity(wi);
c.uncacheEntity(wfi);

return myitem;
}

@@ -590,6 +593,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

else
{
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}

@@ -712,6 +716,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

{
value = "";
}
else
{
value = value.trim();
}
// //getElementData(n, "element");
String element = getAttributeValue(n, "element");
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();

@@ -733,8 +741,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBean

{
qualifier = null;
}

if (!isTest)
// only add metadata if it is no test and there is a real value
if (!isTest && !value.equals(""))
{
itemService.addMetadata(c, i, schema, element, qualifier, language, value);
}
@@ -9,12 +9,18 @@ package org.dspace.app.itemupdate;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FilenameFilter;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -22,6 +28,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -30,6 +37,8 @@ import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;

/**
*
@@ -75,6 +84,7 @@ public class ItemUpdate {

protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

static
{
@@ -353,15 +363,11 @@ public class ItemUpdate {

pr("ItemUpdate - initializing run on " + (new Date()).toString());

context = new Context();
context = new Context(Context.Mode.BATCH_EDIT);
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();

HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
{
HANDLE_PREFIX = "http://hdl.handle.net/";
}
HANDLE_PREFIX = handleService.getCanonicalPrefix();

iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);

@@ -460,6 +466,7 @@ public class ItemUpdate {
{
Item item = itarch.getItem();
itemService.update(context, item); //need to update before commit
context.uncacheEntity(item);
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;

@@ -44,18 +44,20 @@ public interface FormatFilter
public String getFormatString();

/**
* @return string to describe the newly-generated Bitstream's - how it was
* @return string to describe the newly-generated Bitstream - how it was
* produced is a good idea
*/
public String getDescription();

/**
* Read the source stream and produce the filtered content.
*
* @param item Item
* @param source
* input stream
* @param verbose verbosity flag
*
* @return result of filter's transformation, written out to a bitstream
* @return result of filter's transformation as a byte stream.
* @throws Exception if error
*/
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)

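For orientation, a minimal filter satisfying the getDestinationStream contract above might look roughly like the sketch below. The class name UpperCaseTextFilter and its trivial transformation are assumptions for illustration only, not part of this changeset; the essential point is that the method consumes the source bitstream and hands the derived content back as a new InputStream for the media filter framework to store.

// illustrative sketch, assumed to live in org.dspace.app.mediafilter alongside MediaFilter
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;

import org.dspace.content.Item;

public class UpperCaseTextFilter extends MediaFilter
{
    @Override
    public String getFilteredName(String oldFilename) { return oldFilename + ".txt"; }

    @Override
    public String getBundleName() { return "TEXT"; }

    @Override
    public String getFormatString() { return "Text"; }

    @Override
    public String getDescription() { return "Upper-cased text (example)"; }

    @Override
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
        throws Exception
    {
        // read the source, transform it, and return the result as a byte stream
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int b;
        while ((b = source.read()) != -1)
        {
            out.write(Character.toUpperCase(b));
        }
        if (verbose)
        {
            System.out.println(out.toString());
        }
        return new ByteArrayInputStream(out.toByteArray());
    }
}
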
@@ -46,11 +46,4 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
}
}

public static final String[] PDF = {"Adobe PDF"};
@Override
public String[] getInputMIMETypes()
{
return PDF;
}

}

@@ -23,6 +23,7 @@ import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.im4java.core.ConvertCmd;
|
||||
import org.im4java.core.Info;
|
||||
import org.im4java.core.IM4JavaException;
|
||||
import org.im4java.core.IMOperation;
|
||||
import org.im4java.process.ProcessStarter;
|
||||
@@ -34,175 +35,171 @@ import org.dspace.core.ConfigurationManager;
|
||||
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
|
||||
* no bigger than. Creates only JPEGs.
|
||||
*/
|
||||
public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
|
||||
{
|
||||
protected static int width = 180;
|
||||
protected static int height = 120;
|
||||
private static boolean flatten = true;
|
||||
static String bitstreamDescription = "IM Thumbnail";
|
||||
static final String defaultPattern = "Generated Thumbnail";
|
||||
static Pattern replaceRegex = Pattern.compile(defaultPattern);
|
||||
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
|
||||
static {
|
||||
String pre = ImageMagickThumbnailFilter.class.getName();
|
||||
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
|
||||
ProcessStarter.setGlobalSearchPath(s);
|
||||
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
|
||||
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
|
||||
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
protected static int width = 180;
|
||||
protected static int height = 120;
|
||||
private static boolean flatten = true;
|
||||
static String bitstreamDescription = "IM Thumbnail";
|
||||
static final String defaultPattern = "Generated Thumbnail";
|
||||
static Pattern replaceRegex = Pattern.compile(defaultPattern);
|
||||
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
|
||||
static String cmyk_profile;
|
||||
static String srgb_profile;
|
||||
|
||||
static {
|
||||
String pre = ImageMagickThumbnailFilter.class.getName();
|
||||
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
|
||||
ProcessStarter.setGlobalSearchPath(s);
|
||||
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
|
||||
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
|
||||
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
|
||||
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
|
||||
if (description != null) {
|
||||
bitstreamDescription = description;
|
||||
}
|
||||
try {
|
||||
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
|
||||
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
|
||||
} catch(PatternSyntaxException e) {
|
||||
System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public ImageMagickThumbnailFilter() {
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getFilteredName(String oldFilename)
|
||||
{
|
||||
return oldFilename + ".jpg";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bundle name
|
||||
*
|
||||
*/
|
||||
@Override
|
||||
public String getBundleName()
|
||||
{
|
||||
return "THUMBNAIL";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamformat
|
||||
*/
|
||||
@Override
|
||||
public String getFormatString()
|
||||
{
|
||||
return "JPEG";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamDescription
|
||||
*/
|
||||
@Override
|
||||
public String getDescription()
|
||||
{
|
||||
return bitstreamDescription;
|
||||
}
|
||||
|
||||
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
|
||||
File f = File.createTempFile(prefix, suffix);
|
||||
f.deleteOnExit();
|
||||
FileOutputStream fos = new FileOutputStream(f);
|
||||
|
||||
byte[] buffer = new byte[1024];
|
||||
int len = source.read(buffer);
|
||||
while (len != -1) {
|
||||
fos.write(buffer, 0, len);
|
||||
len = source.read(buffer);
|
||||
}
|
||||
fos.close();
|
||||
return f;
|
||||
}
|
||||
|
||||
public File getThumbnailFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
op.addImage(f.getAbsolutePath());
|
||||
op.thumbnail(width, height);
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (verbose) {
|
||||
System.out.println("IM Thumbnail Param: "+op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
public File getImageFile(File f, int page, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
String s = "[" + page + "]";
|
||||
op.addImage(f.getAbsolutePath()+s);
|
||||
if (flatten)
|
||||
{
|
||||
op.flatten();
|
||||
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
|
||||
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
|
||||
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
|
||||
if (description != null) {
|
||||
bitstreamDescription = description;
|
||||
}
|
||||
try {
|
||||
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
|
||||
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
|
||||
} catch (PatternSyntaxException e) {
|
||||
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
|
||||
}
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (verbose) {
|
||||
System.out.println("IM Image Param: "+op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
|
||||
throws Exception
|
||||
{
|
||||
String nsrc = source.getName();
|
||||
for(Bundle b: itemService.getBundles(item, "THUMBNAIL")) {
|
||||
for(Bitstream bit: b.getBitstreams()) {
|
||||
String n = bit.getName();
|
||||
if (n != null) {
|
||||
if (nsrc != null) {
|
||||
if (!n.startsWith(nsrc)) continue;
|
||||
}
|
||||
}
|
||||
String description = bit.getDescription();
|
||||
//If anything other than a generated thumbnail is found, halt processing
|
||||
if (description != null) {
|
||||
if (replaceRegex.matcher(description).matches()) {
|
||||
if (verbose) {
|
||||
System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (description.equals(bitstreamDescription)) {
|
||||
if (verbose) {
|
||||
System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
System.out.println("Custom Thumbnail exists for " + nsrc + " for item " + item.getHandle() + ". Thumbnail will not be generated. ");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return true; //assume that the thumbnail is a custom one
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] getInputMIMETypes()
|
||||
{
|
||||
return ImageIO.getReaderMIMETypes();
|
||||
}
|
||||
public ImageMagickThumbnailFilter() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] getInputDescriptions()
|
||||
{
|
||||
return null;
|
||||
}
|
||||
@Override
|
||||
public String getFilteredName(String oldFilename) {
|
||||
return oldFilename + ".jpg";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bundle name
|
||||
*
|
||||
*/
|
||||
@Override
|
||||
public String getBundleName() {
|
||||
return "THUMBNAIL";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamformat
|
||||
*/
|
||||
@Override
|
||||
public String getFormatString() {
|
||||
return "JPEG";
|
||||
}
|
||||
|
||||
/**
|
||||
* @return String bitstreamDescription
|
||||
*/
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return bitstreamDescription;
|
||||
}
|
||||
|
||||
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
|
||||
File f = File.createTempFile(prefix, suffix);
|
||||
f.deleteOnExit();
|
||||
FileOutputStream fos = new FileOutputStream(f);
|
||||
|
||||
byte[] buffer = new byte[1024];
|
||||
int len = source.read(buffer);
|
||||
while (len != -1) {
|
||||
fos.write(buffer, 0, len);
|
||||
len = source.read(buffer);
|
||||
}
|
||||
fos.close();
|
||||
return f;
|
||||
}
|
||||
|
||||
public File getThumbnailFile(File f, boolean verbose)
|
||||
throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
op.autoOrient();
|
||||
op.addImage(f.getAbsolutePath());
|
||||
op.thumbnail(width, height);
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (verbose) {
|
||||
System.out.println("IM Thumbnail Param: " + op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
public File getImageFile(File f, int page, boolean verbose)
|
||||
throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
String s = "[" + page + "]";
|
||||
op.addImage(f.getAbsolutePath() + s);
|
||||
if (flatten) {
|
||||
op.flatten();
|
||||
}
|
||||
// PDFs using the CMYK color system can be handled specially if
|
||||
// profiles are defined
|
||||
if (cmyk_profile != null && srgb_profile != null) {
|
||||
Info imageInfo = new Info(f.getAbsolutePath(), true);
|
||||
String imageClass = imageInfo.getImageClass();
|
||||
if (imageClass.contains("CMYK")) {
|
||||
op.profile(cmyk_profile);
|
||||
op.profile(srgb_profile);
|
||||
}
|
||||
}
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (verbose) {
|
||||
System.out.println("IM Image Param: " + op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose) throws Exception {
|
||||
String nsrc = source.getName();
|
||||
for (Bundle b : itemService.getBundles(item, "THUMBNAIL")) {
|
||||
for (Bitstream bit : b.getBitstreams()) {
|
||||
String n = bit.getName();
|
||||
if (n != null) {
|
||||
if (nsrc != null) {
|
||||
if (!n.startsWith(nsrc))
|
||||
continue;
|
||||
}
|
||||
}
|
||||
String description = bit.getDescription();
|
||||
// If anything other than a generated thumbnail
|
||||
// is found, halt processing
|
||||
if (description != null) {
|
||||
if (replaceRegex.matcher(description).matches()) {
|
||||
if (verbose) {
|
||||
System.out.println(description + " " + nsrc
|
||||
+ " matches pattern and is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (description.equals(bitstreamDescription)) {
|
||||
if (verbose) {
|
||||
System.out.println(bitstreamDescription + " " + nsrc
|
||||
+ " is replacable.");
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
|
||||
+ item.getHandle() + ". Thumbnail will not be generated. ");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true; // assume that the thumbnail is a custom one
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] getInputExtensions()
|
||||
{
|
||||
return ImageIO.getReaderFileSuffixes();
|
||||
}
|
||||
}
|
||||
|
@@ -7,9 +7,6 @@
|
||||
*/
|
||||
package org.dspace.app.mediafilter;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.*;
|
||||
|
||||
import org.dspace.app.mediafilter.service.MediaFilterService;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.content.*;
|
||||
@@ -24,6 +21,10 @@ import org.dspace.services.ConfigurationService;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* MediaFilterManager is the class that invokes the media/format filters over the
|
||||
* repository's content. A few command line flags affect the operation of the
|
||||
@@ -161,6 +162,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
++processed;
|
||||
}
|
||||
// clear item objects from context cache and internal cache
|
||||
c.uncacheEntity(currentItem);
|
||||
currentItem = null;
|
||||
}
|
||||
}
|
||||
@@ -313,12 +315,10 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
// get bitstream filename, calculate destination filename
|
||||
String newName = formatFilter.getFilteredName(source.getName());
|
||||
|
||||
Bitstream existingBitstream = null; // is there an existing rendition?
|
||||
Bundle targetBundle = null; // bundle we're modifying
|
||||
|
||||
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
|
||||
|
||||
// check if destination bitstream exists
|
||||
Bundle existingBundle = null;
|
||||
Bitstream existingBitstream = null;
|
||||
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
|
||||
if (bundles.size() > 0)
|
||||
{
|
||||
// only finds the last match (FIXME?)
|
||||
@@ -326,14 +326,13 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
List<Bitstream> bitstreams = bundle.getBitstreams();
|
||||
|
||||
for (Bitstream bitstream : bitstreams) {
|
||||
if (bitstream.getName().equals(newName)) {
|
||||
targetBundle = bundle;
|
||||
if (bitstream.getName().trim().equals(newName.trim())) {
|
||||
existingBundle = bundle;
|
||||
existingBitstream = bitstream;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if exists and overwrite = false, exit
|
||||
if (!overWrite && (existingBitstream != null))
|
||||
{
|
||||
@@ -351,67 +350,76 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
+ " (item: " + item.getHandle() + ")");
|
||||
}
|
||||
|
||||
InputStream destStream;
|
||||
try {
|
||||
System.out.println("File: " + newName);
|
||||
destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
|
||||
System.out.println("File: " + newName);
|
||||
|
||||
// start filtering of the bitstream, using try with resource to close all InputStreams properly
|
||||
try (
|
||||
// get the source stream
|
||||
InputStream srcStream = bitstreamService.retrieve(context, source);
|
||||
// filter the source stream to produce the destination stream
|
||||
// this is the hard work, check for OutOfMemoryErrors at the end of the try clause.
|
||||
InputStream destStream = formatFilter.getDestinationStream(item, srcStream, isVerbose);
|
||||
) {
|
||||
if (destStream == null) {
|
||||
if (!isQuiet) {
|
||||
System.out.println("SKIPPED: bitstream " + source.getID()
|
||||
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
Bundle targetBundle; // bundle we're modifying
|
||||
if (bundles.size() < 1)
|
||||
{
|
||||
// create new bundle if needed
|
||||
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
|
||||
}
|
||||
else
|
||||
{
|
||||
// take the first match as we already looked out for the correct bundle name
|
||||
targetBundle = bundles.get(0);
|
||||
}
|
||||
|
||||
// create bitstream to store the filter result
|
||||
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
|
||||
// set the name, source and description of the bitstream
|
||||
b.setName(context, newName);
|
||||
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
|
||||
" on " + DCDate.getCurrent() + " (GMT).");
|
||||
b.setDescription(context, formatFilter.getDescription());
|
||||
// Set the format of the bitstream
|
||||
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
|
||||
formatFilter.getFormatString());
|
||||
bitstreamService.setFormat(context, b, bf);
|
||||
bitstreamService.update(context, b);
|
||||
|
||||
//Set permissions on the derivative bitstream
|
||||
//- First remove any existing policies
|
||||
authorizeService.removeAllPolicies(context, b);
|
||||
|
||||
//- Determine if this is a public-derivative format
|
||||
if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
|
||||
//- Set derivative bitstream to be publicly accessible
|
||||
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
|
||||
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
|
||||
} else {
|
||||
//- Inherit policies from the source bitstream
|
||||
authorizeService.inheritPolicies(context, source, b);
|
||||
}
|
||||
|
||||
//do post-processing of the generated bitstream
|
||||
formatFilter.postProcessBitstream(context, item, b);
|
||||
|
||||
|
||||
} catch (OutOfMemoryError oome) {
|
||||
System.out.println("!!! OutOfMemoryError !!!");
|
||||
return false;
|
||||
}
|
||||
|
||||
// create new bundle if needed
|
||||
if (bundles.size() < 1)
|
||||
{
|
||||
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
|
||||
}
|
||||
else
|
||||
{
|
||||
// take the first match
|
||||
targetBundle = bundles.get(0);
|
||||
}
|
||||
|
||||
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
|
||||
|
||||
// Now set the format and name of the bitstream
|
||||
b.setName(context, newName);
|
||||
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
|
||||
" on " + DCDate.getCurrent() + " (GMT).");
|
||||
b.setDescription(context, formatFilter.getDescription());
|
||||
|
||||
// Find the proper format
|
||||
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
|
||||
formatFilter.getFormatString());
|
||||
bitstreamService.setFormat(context, b, bf);
|
||||
bitstreamService.update(context, b);
|
||||
|
||||
//Set permissions on the derivative bitstream
|
||||
//- First remove any existing policies
|
||||
authorizeService.removeAllPolicies(context, b);
|
||||
|
||||
//- Determine if this is a public-derivative format
|
||||
if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
|
||||
//- Set derivative bitstream to be publicly accessible
|
||||
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
|
||||
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
|
||||
} else {
|
||||
//- Inherit policies from the source bitstream
|
||||
authorizeService.inheritPolicies(context, source, b);
|
||||
}
|
||||
|
||||
// fixme - set date?
|
||||
// we are overwriting, so remove old bitstream
|
||||
if (existingBitstream != null)
|
||||
{
|
||||
bundleService.removeBitstream(context, targetBundle, existingBitstream);
|
||||
bundleService.removeBitstream(context, existingBundle, existingBitstream);
|
||||
}
|
||||
|
||||
if (!isQuiet)
|
||||
@@ -420,9 +428,6 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
|
||||
}
|
||||
|
||||
//do post-processing of the generated bitstream
|
||||
formatFilter.postProcessBitstream(context, item, b);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@@ -0,0 +1,81 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.IOException;

import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.xmlbeans.XmlException;
import org.dspace.content.Item;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Extract flat text from Microsoft Word documents (.doc, .docx).
*/
public class PoiWordFilter
extends MediaFilter
{
private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class);

@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}

@Override
public String getBundleName()
{
return "TEXT";
}

@Override
public String getFormatString()
{
return "Text";
}

@Override
public String getDescription()
{
return "Extracted text";
}

@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
String text;
try
{
// get input stream from bitstream, pass to filter, get string back
POITextExtractor extractor = ExtractorFactory.createExtractor(source);
text = extractor.getText();
}
catch (IOException | OpenXML4JException | XmlException e)
{
System.err.format("Invalid File Format: %s%n", e.getMessage());
LOG.error("Unable to parse the bitstream: ", e);
throw e;
}

// if verbose flag is set, print out extracted text to STDOUT
if (verbose)
{
System.out.println(text);
}

// return the extracted text as a stream.
return new ByteArrayInputStream(text.getBytes());
}
}
@@ -7,26 +7,14 @@
*/
package org.dspace.app.packager;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.packager.PackageDisseminator;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageParameters;
import org.dspace.content.packager.PackageIngester;
import org.dspace.content.packager.PackageParameters;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -36,6 +24,10 @@ import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.workflow.WorkflowException;

import java.io.*;
import java.sql.SQLException;
import java.util.List;

/**
* Command-line interface to the Packager plugin.
* <p>
@@ -331,6 +323,7 @@ public class Packager
//If we are in REPLACE mode
if(pkgParams.replaceModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -394,6 +387,8 @@ public class Packager
//else if normal SUBMIT mode (or basic RESTORE mode -- which is a special type of submission)
else if (myPackager.submit || pkgParams.restoreModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);

PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -445,6 +440,8 @@ public class Packager
}// else, if DISSEMINATE mode
else
{
context.setMode(Context.Mode.READ_ONLY);

//retrieve specified package disseminator
PackageDisseminator dip = (PackageDisseminator) pluginService
.getNamedPlugin(PackageDisseminator.class, myPackager.packageType);

|
||||
*/
|
||||
package org.dspace.app.sitemap;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLEncoder;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.cli.*;
|
||||
import org.apache.commons.lang.ArrayUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
@@ -42,6 +23,16 @@ import org.dspace.core.LogManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
import java.io.*;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLEncoder;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Command-line utility for generating HTML and Sitemaps.org protocol Sitemaps.
|
||||
*
|
||||
@@ -188,7 +179,7 @@ public class GenerateSitemaps
|
||||
+ "?map=", null);
|
||||
}
|
||||
|
||||
Context c = new Context();
|
||||
Context c = new Context(Context.Mode.READ_ONLY);
|
||||
|
||||
List<Community> comms = communityService.findAll(c);
|
||||
|
||||
@@ -201,6 +192,8 @@ public class GenerateSitemaps
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, null);
|
||||
}
|
||||
|
||||
c.uncacheEntity(comm);
|
||||
}
|
||||
|
||||
List<Collection> colls = collectionService.findAll(c);
|
||||
@@ -214,6 +207,8 @@ public class GenerateSitemaps
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, null);
|
||||
}
|
||||
|
||||
c.uncacheEntity(coll);
|
||||
}
|
||||
|
||||
Iterator<Item> allItems = itemService.findAll(c);
|
||||
@@ -234,6 +229,8 @@ public class GenerateSitemaps
|
||||
sitemapsOrg.addURL(url, lastMod);
|
||||
}
|
||||
|
||||
c.uncacheEntity(i);
|
||||
|
||||
itemCount++;
|
||||
}
|
||||
|
||||
|
@@ -0,0 +1,112 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;

import org.apache.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;

import java.sql.SQLException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;

/**
* This comparator is used to order files of an item, so that they are ordered in a way that the first one
* is the most useful for use in the citation_pdf_url for Google Scholar
*/
public class GoogleBitstreamComparator implements Comparator<Bitstream>{

private final static Logger log = Logger.getLogger(GoogleBitstreamComparator.class);

HashMap<String, Integer> priorityMap = new HashMap<>();

private Context context;

public GoogleBitstreamComparator(Context context, Map<String, String> googleScholarSettings) {
this.context = context;
String[] shortDescriptions;
if (googleScholarSettings.containsKey("citation.prioritized_types")){
shortDescriptions = splitAndTrim(googleScholarSettings.get("citation.prioritized_types"));
} else {
log.warn("Please define citation.prioritized_types in google-metadata.properties");
shortDescriptions = new String[0];
}
int priority = 1;
for(String s: shortDescriptions){
try {
BitstreamFormat format = ContentServiceFactory.getInstance().getBitstreamFormatService().findByShortDescription(context, s);
if (format != null) {
priorityMap.put(format.getMIMEType(), priority);
} else {
log.warn(s + " is not a valid short description, please add it to bitstream-formats.xml");
}
priority++;
} catch (SQLException e){
log.error(e.getMessage());
}
}

}

private String[] splitAndTrim(String toSplit){
if(toSplit != null) {
String[] splittedArray = toSplit.split(",");
for (int i = 0; i < splittedArray.length; i++)
splittedArray[i] = splittedArray[i].trim();
return splittedArray;
}
else {
return new String[0];
}
}


/**
* Compares two bitstreams based on their mimetypes, if mimetypes are the same,then the largest bitstream comes first
* See google-metadata.properties to define the order
* @param b1 first bitstream
* @param b2 second bitstream
* @return
*/
public int compare(Bitstream b1, Bitstream b2) {
int priority1 = getPriorityFromBitstream(b1);
int priority2 = getPriorityFromBitstream(b2);

if(priority1 > priority2){
return 1;
}
else if(priority1 == priority2){
if(b1.getSize() <= b2.getSize()){
return 1;
}
else {
return -1;
}
}
else {
return -1;
}
}

private int getPriorityFromBitstream(Bitstream bitstream) {
try {
String check = bitstream.getFormat(context).getMIMEType();
if (priorityMap.containsKey(bitstream.getFormat(context).getMIMEType())) {
return priorityMap.get(bitstream.getFormat(context).getMIMEType());
} else {
return Integer.MAX_VALUE;
}
} catch (SQLException e) {
log.error(e.getMessage());
return Integer.MAX_VALUE;
}
}
}
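Used as in the GoogleMetadata changes further below, the comparator sorts a bundle's bitstreams so that the most citation-worthy file comes first. A rough usage sketch follows; the settings map, its prioritized_types entry and the surrounding variables (context, bundle) are assumed example values, not code from this changeset:

// assumes a Context and a Bundle are already in scope
Map<String, String> settings = new HashMap<>();
settings.put("citation.prioritized_types", "Adobe PDF, Microsoft Word");

// highest-priority MIME type first; ties are broken by size, largest first
List<Bitstream> bitstreams = new ArrayList<>(bundle.getBitstreams());
Collections.sort(bitstreams, new GoogleBitstreamComparator(context, settings));
Bitstream best = bitstreams.isEmpty() ? null : bitstreams.get(0);
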
@@ -7,38 +7,30 @@
|
||||
*/
|
||||
package org.dspace.app.util;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authorize.factory.AuthorizeServiceFactory;
|
||||
import org.dspace.content.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.jdom.Element;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
import java.util.Collection;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
/**
|
||||
* Configuration and mapping for Google Scholar output metadata
|
||||
* @author Sands Fish
|
||||
@@ -60,7 +52,7 @@ public class GoogleMetadata
|
||||
protected String itemURL;
|
||||
|
||||
// Configuration keys and fields
|
||||
protected static Map<String, String> configuredFields = new HashMap<String, String>();
|
||||
protected static Map<String, String> googleScholarSettings = new HashMap<String, String>();
|
||||
|
||||
// Google field names (e.g. citation_fieldname) and formatted metadata
|
||||
// values
|
||||
@@ -128,6 +120,8 @@ public class GoogleMetadata
|
||||
|
||||
protected final int ALL_FIELDS_IN_OPTION = 2;
|
||||
|
||||
private static GoogleBitstreamComparator googleBitstreamComparator = null;
|
||||
|
||||
// Load configured fields from google-metadata.properties
|
||||
static
|
||||
{
|
||||
@@ -181,7 +175,7 @@ public class GoogleMetadata
|
||||
if (null != name && !name.equals("") && null != field
|
||||
&& !field.equals(""))
|
||||
{
|
||||
configuredFields.put(name.trim(), field.trim());
|
||||
googleScholarSettings.put(name.trim(), field.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -200,9 +194,9 @@ public class GoogleMetadata
|
||||
{
|
||||
log.debug("Google Metadata Configuration Mapping:");
|
||||
|
||||
for (String name : configuredFields.keySet())
|
||||
for (String name : googleScholarSettings.keySet())
|
||||
{
|
||||
log.debug(" " + name + " => " + configuredFields.get(name));
|
||||
log.debug(" " + name + " => " + googleScholarSettings.get(name));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -221,6 +215,7 @@ public class GoogleMetadata
|
||||
this.item = item;
|
||||
this.itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
itemURL = HandleServiceFactory.getInstance().getHandleService().resolveToURL(context, item.getHandle());
|
||||
googleBitstreamComparator = new GoogleBitstreamComparator(context, googleScholarSettings);
|
||||
parseItem();
|
||||
}
|
||||
|
||||
@@ -234,7 +229,7 @@ public class GoogleMetadata
|
||||
protected boolean addSingleField(String fieldName)
|
||||
{
|
||||
|
||||
String config = configuredFields.get(fieldName);
|
||||
String config = googleScholarSettings.get(fieldName);
|
||||
|
||||
if (null == config || config.equals(""))
|
||||
{
|
||||
@@ -738,7 +733,7 @@ public class GoogleMetadata
|
||||
addSingleField(PATENT_NUMBER);
|
||||
|
||||
// Use config value for patent country. Should be a literal.
|
||||
String countryConfig = configuredFields.get(PATENT_COUNTRY);
|
||||
String countryConfig = googleScholarSettings.get(PATENT_COUNTRY);
|
||||
if (null != countryConfig && !countryConfig.trim().equals(""))
|
||||
{
|
||||
metadataMappings.put(PATENT_COUNTRY, countryConfig.trim());
|
||||
@@ -1051,10 +1046,13 @@ public class GoogleMetadata
|
||||
*/
|
||||
protected Bitstream findLinkableFulltext(Item item) throws SQLException {
|
||||
Bitstream bestSoFar = null;
|
||||
int bitstreamCount = 0;
|
||||
|
||||
List<Bundle> contentBundles = itemService.getBundles(item, "ORIGINAL");
|
||||
|
||||
for (Bundle bundle : contentBundles) {
|
||||
List<Bitstream> bitstreams = bundle.getBitstreams();
|
||||
Collections.sort(bitstreams, googleBitstreamComparator);
|
||||
|
||||
for (Bitstream candidate : bitstreams) {
|
||||
if (candidate.equals(bundle.getPrimaryBitstream())) { // is primary -> use this one
|
||||
if (isPublic(candidate)) {
|
||||
@@ -1097,7 +1095,7 @@ public class GoogleMetadata
|
||||
protected void addAggregateValues(String field, String delimiter)
|
||||
{
|
||||
|
||||
String authorConfig = configuredFields.get(field);
|
||||
String authorConfig = googleScholarSettings.get(field);
|
||||
ArrayList<MetadataValue> fields = resolveMetadataFields(authorConfig);
|
||||
|
||||
if (null != fields && !fields.isEmpty())
|
||||
@@ -1125,7 +1123,7 @@ public class GoogleMetadata
|
||||
*/
|
||||
protected void addMultipleValues(String FIELD)
|
||||
{
|
||||
String fieldConfig = configuredFields.get(FIELD);
|
||||
String fieldConfig = googleScholarSettings.get(FIELD);
|
||||
ArrayList<MetadataValue> fields = resolveMetadataFields(fieldConfig);
|
||||
|
||||
if (null != fields && !fields.isEmpty())
|
||||
@@ -1146,7 +1144,7 @@ public class GoogleMetadata
|
||||
protected boolean itemIsDissertation()
|
||||
{
|
||||
|
||||
String dConfig = configuredFields.get(DISSERTATION_ID);
|
||||
String dConfig = googleScholarSettings.get(DISSERTATION_ID);
|
||||
if (null == dConfig || dConfig.trim().equals(""))
|
||||
{
|
||||
return false;
|
||||
@@ -1165,7 +1163,7 @@ public class GoogleMetadata
|
||||
protected boolean itemIsPatent()
|
||||
{
|
||||
|
||||
String dConfig = configuredFields.get(PATENT_ID);
|
||||
String dConfig = googleScholarSettings.get(PATENT_ID);
|
||||
if (null == dConfig || dConfig.trim().equals(""))
|
||||
{
|
||||
return false;
|
||||
@@ -1184,7 +1182,7 @@ public class GoogleMetadata
|
||||
protected boolean itemIsTechReport()
|
||||
{
|
||||
|
||||
String dConfig = configuredFields.get(TECH_REPORT_ID);
|
||||
String dConfig = googleScholarSettings.get(TECH_REPORT_ID);
|
||||
if (null == dConfig || dConfig.trim().equals(""))
|
||||
{
|
||||
return false;
|
||||
|
@@ -7,16 +7,20 @@
*/
package org.dspace.app.util;

import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.dspace.app.util.service.MetadataExposureService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;

import java.sql.SQLException;
import java.util.*;

/**
* Static utility class to manage configuration for exposure (hiding) of
* certain Item metadata fields.
@@ -65,6 +69,9 @@ public class MetadataExposureServiceImpl implements MetadataExposureService
@Autowired(required = true)
protected AuthorizeService authorizeService;

@Autowired(required = true)
protected ConfigurationService configurationService;

protected MetadataExposureServiceImpl()
{

@@ -132,12 +139,11 @@ public class MetadataExposureServiceImpl implements MetadataExposureService
hiddenElementSets = new HashMap<>();
hiddenElementMaps = new HashMap<>();

Enumeration pne = ConfigurationManager.propertyNames();
while (pne.hasMoreElements())
{
String key = (String)pne.nextElement();
List<String> propertyKeys = configurationService.getPropertyKeys();
for (String key : propertyKeys) {
if (key.startsWith(CONFIG_PREFIX))
{
if (configurationService.getBooleanProperty(key, true)){
String mdField = key.substring(CONFIG_PREFIX.length());
String segment[] = mdField.split("\\.", 3);

@@ -174,6 +180,7 @@ public class MetadataExposureServiceImpl implements MetadataExposureService
}
}
}
}
}
}
}

@@ -1041,6 +1041,7 @@ public class ShibAuthentication implements AuthenticationMethod
*
* This method will not interpret the header value in any way.
*
* This method will return null if value is empty.
*
* @param request The HTTP request to look for values in.
* @param name The name of the attribute or header
@@ -1064,6 +1065,17 @@ public class ShibAuthentication implements AuthenticationMethod
value = request.getHeader(name.toLowerCase());
if (StringUtils.isEmpty(value))
value = request.getHeader(name.toUpperCase());

// Added extra check for empty value of an attribute.
// In case that value is Empty, it should not be returned, return 'null' instead.
// This prevents passing empty value to other methods, stops the authentication process
// and prevents creation of 'empty' DSpace EPerson if autoregister == true and it subsequent
// authentication.
if (StringUtils.isEmpty(value))
{
log.debug("ShibAuthentication - attribute " + name + " is empty!");
return null;
}

boolean reconvertAttributes =
configurationService.getBooleanProperty(

@@ -47,15 +47,10 @@ public class AuthorityServiceImpl implements AuthorityService{
}

for (AuthorityIndexerInterface indexerInterface : indexers) {

indexerInterface.init(context , item);
while (indexerInterface.hasMore()) {
AuthorityValue authorityValue = indexerInterface.nextValue();
if(authorityValue != null)
indexingService.indexContent(authorityValue, true);
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(context , item);
for (AuthorityValue authorityValue : authorityValues) {
indexingService.indexContent(authorityValue);
}
//Close up
indexerInterface.close();
}
//Commit to our server
indexingService.commit();

@@ -62,7 +62,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
}

@Override
public void indexContent(AuthorityValue value, boolean force) {
public void indexContent(AuthorityValue value) {
SolrInputDocument doc = value.getSolrInputDocument();

try{

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority;

import java.util.List;

/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public interface SolrAuthorityInterface {

List<AuthorityValue> queryAuthorities(String text, int max);

AuthorityValue queryAuthorityID(String id);
}
@@ -11,9 +11,13 @@ import org.dspace.authority.AuthorityValue;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authority.factory.AuthorityServiceFactory;
|
||||
import org.dspace.authority.service.AuthorityService;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@@ -31,6 +35,7 @@ public class AuthorityIndexClient {
|
||||
protected static final AuthorityService authorityService = AuthorityServiceFactory.getInstance().getAuthorityService();
|
||||
protected static final AuthorityIndexingService indexingService = AuthorityServiceFactory.getInstance().getAuthorityIndexingService();
|
||||
protected static final List<AuthorityIndexerInterface> indexers = AuthorityServiceFactory.getInstance().getAuthorityIndexers();
|
||||
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
@@ -53,18 +58,23 @@ public class AuthorityIndexClient {
|
||||
|
||||
//Get all our values from the input forms
|
||||
Map<String, AuthorityValue> toIndexValues = new HashMap<>();
|
||||
|
||||
for (AuthorityIndexerInterface indexerInterface : indexers) {
|
||||
log.info("Initialize " + indexerInterface.getClass().getName());
|
||||
System.out.println("Initialize " + indexerInterface.getClass().getName());
|
||||
indexerInterface.init(context, true);
|
||||
while (indexerInterface.hasMore()) {
|
||||
AuthorityValue authorityValue = indexerInterface.nextValue();
|
||||
if(authorityValue != null){
|
||||
|
||||
Iterator<Item> allItems = itemService.findAll(context);
|
||||
Map<String, AuthorityValue> authorityCache = new HashMap<>();
|
||||
while (allItems.hasNext()) {
|
||||
Item item = allItems.next();
|
||||
|
||||
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(context, item, authorityCache);
|
||||
for (AuthorityValue authorityValue : authorityValues) {
|
||||
toIndexValues.put(authorityValue.getId(), authorityValue);
|
||||
}
|
||||
|
||||
context.uncacheEntity(item);
|
||||
}
|
||||
//Close up
|
||||
indexerInterface.close();
|
||||
}
|
||||
|
||||
|
||||
@@ -74,7 +84,7 @@ public class AuthorityIndexClient {
|
||||
log.info("Writing new data");
|
||||
System.out.println("Writing new data");
|
||||
for(String id : toIndexValues.keySet()){
|
||||
indexingService.indexContent(toIndexValues.get(id), true);
|
||||
indexingService.indexContent(toIndexValues.get(id));
|
||||
indexingService.commit();
|
||||
}
|
||||
|
||||
|
@@ -14,6 +14,8 @@ import org.dspace.content.Item;
import org.dspace.core.Context;

import java.sql.SQLException;
import java.util.List;
import java.util.Map;

/**
*
@@ -24,17 +26,11 @@ import java.sql.SQLException;
*/
public interface AuthorityIndexerInterface {

public void init(Context context, Item item);
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
throws SQLException, AuthorizeException;

public void init(Context context, boolean useCache);

public void init(Context context);

public AuthorityValue nextValue();

public boolean hasMore() throws SQLException, AuthorizeException;

public void close();
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
throws SQLException, AuthorizeException;

public boolean isConfiguredProperly();
}

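The interface is now pull-based: callers hand in an Item (optionally with a shared cache keyed on the metadata value) and get back the authority values to index, instead of driving an init()/hasMore()/nextValue() loop themselves. A rough usage sketch along the lines of the callers updated in this changeset; the indexer, itemService, indexingService and context variables are assumed to be in scope:

// iterate all items, collecting and indexing their authority values
Map<String, AuthorityValue> cache = new HashMap<>();
Iterator<Item> items = itemService.findAll(context);
while (items.hasNext()) {
    Item item = items.next();
    // one call per item; the cache lets repeated metadata values reuse generated authorities
    for (AuthorityValue value : indexer.getAuthorityValues(context, item, cache)) {
        indexingService.indexContent(value);
    }
    context.uncacheEntity(item);
}
indexingService.commit();
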
@@ -20,7 +20,7 @@ import org.dspace.authority.AuthorityValue;
public interface AuthorityIndexingService {


public void indexContent(AuthorityValue value, boolean force);
public void indexContent(AuthorityValue value);

public void cleanIndex() throws Exception;


@@ -27,13 +27,13 @@ import java.util.*;
|
||||
/**
|
||||
* DSpaceAuthorityIndexer is used in IndexClient, which is called by the AuthorityConsumer and the indexing-script.
|
||||
* <p>
|
||||
* An instance of DSpaceAuthorityIndexer is bound to a list of items.
|
||||
* This can be one item or all items too depending on the init() method.
|
||||
* <p>
|
||||
* DSpaceAuthorityIndexer lets you iterate over each metadata value
|
||||
* for each metadata field defined in dspace.cfg with 'authority.author.indexer.field'
|
||||
* for each item in the list.
|
||||
* The DSpaceAuthorityIndexer will return a list of all authority values for a
|
||||
* given item. It will return an authority value for all metadata fields defined
|
||||
* in dspace.conf with 'authority.author.indexer.field'.
|
||||
* <p>
|
||||
* You have to call getAuthorityValues for every Item you want to index. But you
|
||||
* can supply an optional cache, to save the mapping from the metadata value to
|
||||
* the new authority values for metadata fields without an authority key.
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
@@ -44,23 +44,16 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
|
||||
|
||||
private static final Logger log = Logger.getLogger(DSpaceAuthorityIndexer.class);
|
||||
|
||||
protected Iterator<Item> itemIterator;
|
||||
protected Item currentItem;
|
||||
/**
|
||||
* The list of metadata fields which are to be indexed *
|
||||
*/
|
||||
protected List<String> metadataFields;
|
||||
protected int currentFieldIndex;
|
||||
protected int currentMetadataIndex;
|
||||
protected AuthorityValue nextValue;
|
||||
protected Context context;
|
||||
|
||||
@Autowired(required = true)
|
||||
protected AuthorityValueService authorityValueService;
|
||||
|
||||
@Autowired(required = true)
|
||||
protected ItemService itemService;
|
||||
protected boolean useCache;
|
||||
protected Map<String, AuthorityValue> cache;
|
||||
|
||||
|
||||
@Autowired(required = true)
|
||||
protected ConfigurationService configurationService;
|
||||
@@ -76,143 +69,100 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void init(Context context, Item item) {
|
||||
ArrayList<Item> itemList = new ArrayList<>();
|
||||
itemList.add(item);
|
||||
this.itemIterator = itemList.iterator();
|
||||
currentItem = this.itemIterator.next();
|
||||
initialize(context);
|
||||
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
return getAuthorityValues(context, item, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init(Context context) {
|
||||
init(context, false);
|
||||
}
|
||||
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
|
||||
throws SQLException, AuthorizeException
|
||||
{
|
||||
List<AuthorityValue> values = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
public void init(Context context, boolean useCache) {
|
||||
try {
|
||||
this.itemIterator = itemService.findAll(context);
|
||||
currentItem = this.itemIterator.next();
|
||||
} catch (SQLException e) {
|
||||
log.error("Error while retrieving all items in the metadata indexer");
|
||||
}
|
||||
initialize(context);
|
||||
this.useCache = useCache;
|
||||
}
|
||||
for (String metadataField : metadataFields) {
|
||||
List<MetadataValue> metadataValues = itemService.getMetadataByMetadataString(item, metadataField);
|
||||
for (MetadataValue metadataValue : metadataValues) {
|
||||
String content = metadataValue.getValue();
|
||||
String authorityKey = metadataValue.getAuthority();
|
||||
|
||||
protected void initialize(Context context) {
|
||||
this.context = context;
|
||||
// We only want to update our item IF our UUID is not present
|
||||
// or if we need to generate one.
|
||||
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) ||
|
||||
StringUtils.startsWith(authorityKey, AuthorityValueService.GENERATE);
|
||||
|
||||
currentFieldIndex = 0;
|
||||
currentMetadataIndex = 0;
|
||||
useCache = false;
|
||||
cache = new HashMap<>();
|
||||
}
|
||||
|
||||
@Override
|
||||
public AuthorityValue nextValue() {
|
||||
return nextValue;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean hasMore() throws SQLException, AuthorizeException {
|
||||
if (currentItem == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// 1. iterate over the metadata values
|
||||
|
||||
String metadataField = metadataFields.get(currentFieldIndex);
|
||||
List<MetadataValue> values = itemService.getMetadataByMetadataString(currentItem, metadataField);
|
||||
if (currentMetadataIndex < values.size()) {
|
||||
prepareNextValue(metadataField, values.get(currentMetadataIndex));
|
||||
|
||||
currentMetadataIndex++;
|
||||
return true;
|
||||
} else {
|
||||
|
||||
// 2. iterate over the metadata fields
|
||||
|
||||
if ((currentFieldIndex + 1) < metadataFields.size()) {
|
||||
currentFieldIndex++;
|
||||
//Reset our current metadata index since we are moving to another field
|
||||
currentMetadataIndex = 0;
|
||||
return hasMore();
|
||||
} else {
|
||||
|
||||
// 3. iterate over the items
|
||||
|
||||
if (itemIterator.hasNext()) {
|
||||
currentItem = itemIterator.next();
|
||||
//Reset our current field index
|
||||
currentFieldIndex = 0;
|
||||
//Reset our current metadata index
|
||||
currentMetadataIndex = 0;
|
||||
} else {
|
||||
currentItem = null;
|
||||
AuthorityValue value = null;
|
||||
if (StringUtils.isBlank(authorityKey) && cache != null) {
|
||||
// This is a value currently without an authority. So query
|
||||
// the cache, if an authority is found for the exact value.
|
||||
value = cache.get(content);
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
value = getAuthorityValue(context, metadataField, content,authorityKey);
|
||||
}
|
||||
|
||||
if (value != null) {
|
||||
if (requiresItemUpdate) {
|
||||
value.updateItem(context, item, metadataValue);
|
||||
|
||||
try {
|
||||
itemService.update(context, item);
|
||||
}
|
||||
catch (Exception e) {
|
||||
log.error("Error creating a metadatavalue's authority", e);
|
||||
}
|
||||
}
|
||||
|
||||
if (cache != null) {
|
||||
cache.put(content, value);
|
||||
}
|
||||
|
||||
values.add(value);
|
||||
}
|
||||
else {
|
||||
log.error("Error getting an authority value for " +
|
||||
"the metadata value \"" + content + "\" " +
|
||||
"in the field \"" + metadataField + "\" " +
|
||||
"of the item " + item.getHandle());
|
||||
}
|
||||
return hasMore();
|
||||
}
|
||||
}
|
||||
|
||||
return values;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method looks at the authority of a metadata.
|
||||
* This method looks at the authority of a metadata value.
|
||||
* If the authority can be found in solr, that value is reused.
|
||||
* Otherwise a new authority value will be generated that will be indexed in solr.
|
||||
*
|
||||
* If the authority starts with AuthorityValueService.GENERATE, a specific type of AuthorityValue will be generated.
|
||||
* Depending on the type this may involve querying an external REST service
|
||||
*
|
||||
* @param context Current DSpace context
|
||||
* @param metadataField Is one of the fields defined in dspace.cfg to be indexed.
|
||||
* @param value Is one of the values of the given metadataField in one of the items being indexed.
|
||||
* @throws SQLException if database error
|
||||
* @throws AuthorizeException if authorization error
|
||||
* @param metadataContent Content of the current metadata value.
|
||||
* @param metadataAuthorityKey Existing authority of the metadata value.
|
||||
*/
|
||||
protected void prepareNextValue(String metadataField, MetadataValue value) throws SQLException, AuthorizeException {
|
||||
private AuthorityValue getAuthorityValue(Context context, String metadataField,
|
||||
String metadataContent, String metadataAuthorityKey)
|
||||
{
|
||||
if (StringUtils.isNotBlank(metadataAuthorityKey) &&
|
||||
!metadataAuthorityKey.startsWith(AuthorityValueService.GENERATE)) {
|
||||
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly
|
||||
// necessary here but it prevents exceptions in solr
|
||||
|
||||
nextValue = null;
|
||||
|
||||
String content = value.getValue();
|
||||
String authorityKey = value.getAuthority();
|
||||
//We only want to update our item IF our UUID is not present or if we need to generate one.
|
||||
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) || StringUtils.startsWith(authorityKey, AuthorityValueService.GENERATE);
|
||||
|
||||
if (StringUtils.isNotBlank(authorityKey) && !authorityKey.startsWith(AuthorityValueService.GENERATE)) {
|
||||
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly necessary here but it prevents exceptions in solr
|
||||
nextValue = authorityValueService.findByUID(context, authorityKey);
|
||||
}
|
||||
if (nextValue == null && StringUtils.isBlank(authorityKey) && useCache) {
|
||||
// A metadata value without an authority is being indexed
|
||||
// If there is an exact match in the cache, reuse it rather than adding a new one.
|
||||
AuthorityValue cachedAuthorityValue = cache.get(content);
|
||||
if (cachedAuthorityValue != null) {
|
||||
nextValue = cachedAuthorityValue;
|
||||
AuthorityValue value = authorityValueService.findByUID(context, metadataAuthorityKey);
|
||||
if (value != null) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
if (nextValue == null) {
|
||||
nextValue = authorityValueService.generate(context, authorityKey, content, metadataField.replaceAll("\\.", "_"));
|
||||
}
|
||||
if (nextValue != null && requiresItemUpdate) {
|
||||
nextValue.updateItem(context, currentItem, value);
|
||||
try {
|
||||
itemService.update(context, currentItem);
|
||||
} catch (Exception e) {
|
||||
log.error("Error creating a metadatavalue's authority", e);
|
||||
}
|
||||
}
|
||||
if (useCache) {
|
||||
cache.put(content, nextValue);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
itemIterator = null;
|
||||
cache.clear();
|
||||
return authorityValueService.generate(context, metadataAuthorityKey,
|
||||
metadataContent, metadataField.replaceAll("\\.", "_"));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -1,86 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.orcid.model.Bio;
|
||||
import org.dspace.authority.orcid.model.Work;
|
||||
import org.dspace.authority.orcid.xml.XMLtoBio;
|
||||
import org.dspace.authority.orcid.xml.XMLtoWork;
|
||||
import org.dspace.authority.rest.RestSource;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.w3c.dom.Document;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Orcid extends RestSource {
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(Orcid.class);
|
||||
|
||||
private static Orcid orcid;
|
||||
|
||||
public static Orcid getOrcid() {
|
||||
if (orcid == null) {
|
||||
orcid = DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("OrcidSource", Orcid.class);
|
||||
}
|
||||
return orcid;
|
||||
}
|
||||
|
||||
private Orcid(String url) {
|
||||
super(url);
|
||||
}
|
||||
|
||||
public Bio getBio(String id) {
|
||||
Document bioDocument = restConnector.get(id + "/orcid-bio");
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
Bio bio = converter.convert(bioDocument).get(0);
|
||||
bio.setOrcid(id);
|
||||
return bio;
|
||||
}
|
||||
|
||||
public List<Work> getWorks(String id) {
|
||||
Document document = restConnector.get(id + "/orcid-works");
|
||||
XMLtoWork converter = new XMLtoWork();
|
||||
return converter.convert(document);
|
||||
}
|
||||
|
||||
public List<Bio> queryBio(String name, int start, int rows) {
|
||||
Document bioDocument = restConnector.get("search/orcid-bio?q=" + URLEncoder.encode("\"" + name + "\"") + "&start=" + start + "&rows=" + rows);
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
return converter.convert(bioDocument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AuthorityValue> queryAuthorities(String text, int max) {
|
||||
List<Bio> bios = queryBio(text, 0, max);
|
||||
List<AuthorityValue> authorities = new ArrayList<AuthorityValue>();
|
||||
for (Bio bio : bios) {
|
||||
authorities.add(OrcidAuthorityValue.create(bio));
|
||||
}
|
||||
return authorities;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AuthorityValue queryAuthorityID(String id) {
|
||||
Bio bio = getBio(id);
|
||||
return OrcidAuthorityValue.create(bio);
|
||||
}
|
||||
}
|
@@ -1,320 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.AuthorityValueServiceImpl;
|
||||
import org.dspace.authority.PersonAuthorityValue;
|
||||
import org.dspace.authority.orcid.model.Bio;
|
||||
import org.dspace.authority.orcid.model.BioExternalIdentifier;
|
||||
import org.dspace.authority.orcid.model.BioName;
|
||||
import org.dspace.authority.orcid.model.BioResearcherUrl;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class OrcidAuthorityValue extends PersonAuthorityValue {
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(OrcidAuthorityValue.class);
|
||||
|
||||
private String orcid_id;
|
||||
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
|
||||
private boolean update; // used in setValues(Bio bio)
|
||||
|
||||
|
||||
/**
|
||||
* Creates an instance of OrcidAuthorityValue with only uninitialized fields.
|
||||
* This is meant to be filled in with values from an existing record.
|
||||
* To create a brand new OrcidAuthorityValue, use create()
|
||||
*/
|
||||
public OrcidAuthorityValue() {
|
||||
}
|
||||
|
||||
public OrcidAuthorityValue(SolrDocument document) {
|
||||
super(document);
|
||||
}
|
||||
|
||||
public String getOrcid_id() {
|
||||
return orcid_id;
|
||||
}
|
||||
|
||||
public void setOrcid_id(String orcid_id) {
|
||||
this.orcid_id = orcid_id;
|
||||
}
|
||||
|
||||
public Map<String, List<String>> getOtherMetadata() {
|
||||
return otherMetadata;
|
||||
}
|
||||
|
||||
public void addOtherMetadata(String label, String data) {
|
||||
List<String> strings = otherMetadata.get(label);
|
||||
if (strings == null) {
|
||||
strings = new ArrayList<String>();
|
||||
}
|
||||
strings.add(data);
|
||||
otherMetadata.put(label, strings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SolrInputDocument getSolrInputDocument() {
|
||||
SolrInputDocument doc = super.getSolrInputDocument();
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
doc.addField("orcid_id", getOrcid_id());
|
||||
}
|
||||
|
||||
for (String t : otherMetadata.keySet()) {
|
||||
List<String> data = otherMetadata.get(t);
|
||||
for (String data_entry : data) {
|
||||
doc.addField("label_" + t, data_entry);
|
||||
}
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setValues(SolrDocument document) {
|
||||
super.setValues(document);
|
||||
this.orcid_id = String.valueOf(document.getFieldValue("orcid_id"));
|
||||
|
||||
otherMetadata = new HashMap<String, List<String>>();
|
||||
for (String fieldName : document.getFieldNames()) {
|
||||
String labelPrefix = "label_";
|
||||
if (fieldName.startsWith(labelPrefix)) {
|
||||
String label = fieldName.substring(labelPrefix.length());
|
||||
List<String> list = new ArrayList<String>();
|
||||
Collection<Object> fieldValues = document.getFieldValues(fieldName);
|
||||
for (Object o : fieldValues) {
|
||||
list.add(String.valueOf(o));
|
||||
}
|
||||
otherMetadata.put(label, list);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static OrcidAuthorityValue create() {
|
||||
OrcidAuthorityValue orcidAuthorityValue = new OrcidAuthorityValue();
|
||||
orcidAuthorityValue.setId(UUID.randomUUID().toString());
|
||||
orcidAuthorityValue.updateLastModifiedDate();
|
||||
orcidAuthorityValue.setCreationDate(new Date());
|
||||
return orcidAuthorityValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an authority based on a given orcid bio
|
||||
* @param bio Bio
|
||||
* @return OrcidAuthorityValue
|
||||
*/
|
||||
public static OrcidAuthorityValue create(Bio bio) {
|
||||
OrcidAuthorityValue authority = OrcidAuthorityValue.create();
|
||||
|
||||
authority.setValues(bio);
|
||||
|
||||
return authority;
|
||||
}
|
||||
|
||||
public boolean setValues(Bio bio) {
|
||||
BioName name = bio.getName();
|
||||
|
||||
if (updateValue(bio.getOrcid(), getOrcid_id())) {
|
||||
setOrcid_id(bio.getOrcid());
|
||||
}
|
||||
|
||||
if (updateValue(name.getFamilyName(), getLastName())) {
|
||||
setLastName(name.getFamilyName());
|
||||
}
|
||||
|
||||
if (updateValue(name.getGivenNames(), getFirstName())) {
|
||||
setFirstName(name.getGivenNames());
|
||||
}
|
||||
|
||||
if (StringUtils.isNotBlank(name.getCreditName())) {
|
||||
if (!getNameVariants().contains(name.getCreditName())) {
|
||||
addNameVariant(name.getCreditName());
|
||||
update = true;
|
||||
}
|
||||
}
|
||||
for (String otherName : name.getOtherNames()) {
|
||||
if (!getNameVariants().contains(otherName)) {
|
||||
addNameVariant(otherName);
|
||||
update = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (updateOtherMetadata("country", bio.getCountry())) {
|
||||
addOtherMetadata("country", bio.getCountry());
|
||||
}
|
||||
|
||||
for (String keyword : bio.getKeywords()) {
|
||||
if (updateOtherMetadata("keyword", keyword)) {
|
||||
addOtherMetadata("keyword", keyword);
|
||||
}
|
||||
}
|
||||
|
||||
for (BioExternalIdentifier externalIdentifier : bio.getBioExternalIdentifiers()) {
|
||||
if (updateOtherMetadata("external_identifier", externalIdentifier.toString())) {
|
||||
addOtherMetadata("external_identifier", externalIdentifier.toString());
|
||||
}
|
||||
}
|
||||
|
||||
for (BioResearcherUrl researcherUrl : bio.getResearcherUrls()) {
|
||||
if (updateOtherMetadata("researcher_url", researcherUrl.toString())) {
|
||||
addOtherMetadata("researcher_url", researcherUrl.toString());
|
||||
}
|
||||
}
|
||||
|
||||
if (updateOtherMetadata("biography", bio.getBiography())) {
|
||||
addOtherMetadata("biography", bio.getBiography());
|
||||
}
|
||||
|
||||
setValue(getName());
|
||||
|
||||
if (update) {
|
||||
update();
|
||||
}
|
||||
boolean result = update;
|
||||
update = false;
|
||||
return result;
|
||||
}
|
||||
|
||||
private boolean updateOtherMetadata(String label, String data) {
|
||||
List<String> strings = getOtherMetadata().get(label);
|
||||
boolean update;
|
||||
if (strings == null) {
|
||||
update = StringUtils.isNotBlank(data);
|
||||
} else {
|
||||
update = !strings.contains(data);
|
||||
}
|
||||
if (update) {
|
||||
this.update = true;
|
||||
}
|
||||
return update;
|
||||
}
|
||||
|
||||
private boolean updateValue(String incoming, String resident) {
|
||||
boolean update = StringUtils.isNotBlank(incoming) && !incoming.equals(resident);
|
||||
if (update) {
|
||||
this.update = true;
|
||||
}
|
||||
return update;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> choiceSelectMap() {
|
||||
|
||||
Map<String, String> map = super.choiceSelectMap();
|
||||
|
||||
map.put("orcid", getOrcid_id());
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAuthorityType() {
|
||||
return "orcid";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String generateString() {
|
||||
String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() + AuthorityValueServiceImpl.SPLIT;
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
generateString += getOrcid_id();
|
||||
}
|
||||
return generateString;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public AuthorityValue newInstance(String info) {
|
||||
AuthorityValue authorityValue = null;
|
||||
if (StringUtils.isNotBlank(info)) {
|
||||
Orcid orcid = Orcid.getOrcid();
|
||||
authorityValue = orcid.queryAuthorityID(info);
|
||||
} else {
|
||||
authorityValue = OrcidAuthorityValue.create();
|
||||
}
|
||||
return authorityValue;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
|
||||
|
||||
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return orcid_id != null ? orcid_id.hashCode() : 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasTheSameInformationAs(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
if (!super.hasTheSameInformationAs(o)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
|
||||
|
||||
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (String key : otherMetadata.keySet()) {
|
||||
if(otherMetadata.get(key) != null){
|
||||
List<String> metadata = otherMetadata.get(key);
|
||||
List<String> otherMetadata = that.otherMetadata.get(key);
|
||||
if (otherMetadata == null) {
|
||||
return false;
|
||||
} else {
|
||||
HashSet<String> metadataSet = new HashSet<String>(metadata);
|
||||
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
|
||||
if (!metadataSet.equals(otherMetadataSet)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}else{
|
||||
if(that.otherMetadata.get(key) != null){
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
185
dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv2.java
Normal file
@@ -0,0 +1,185 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.SolrAuthorityInterface;
|
||||
import org.dspace.authority.orcid.xml.XMLtoBio;
|
||||
import org.dspace.authority.rest.RESTConnector;
|
||||
import org.json.JSONObject;
|
||||
import org.orcid.jaxb.model.record_v2.Person;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* @author Jonas Van Goolen (jonas at atmire dot com)
|
||||
* This class contains all methods for retrieving "Person" objects by calling the ORCID (version 2) endpoints.
|
||||
* It can also create AuthorityValues based on the returned Person objects.
|
||||
*/
|
||||
public class Orcidv2 implements SolrAuthorityInterface {
|
||||
|
||||
private static Logger log = Logger.getLogger(Orcidv2.class);
|
||||
|
||||
public RESTConnector restConnector;
|
||||
private String OAUTHUrl;
|
||||
private String clientId;
|
||||
|
||||
private String clientSecret;
|
||||
|
||||
private String accessToken;
|
||||
|
||||
/**
|
||||
* Initialize the accessToken that is required for all subsequent calls to ORCID
|
||||
*/
|
||||
public void init() throws IOException {
|
||||
if (StringUtils.isNotBlank(accessToken) && StringUtils.isNotBlank(clientSecret)) {
|
||||
String authenticationParameters = "?client_id=" + clientId + "&client_secret=" + clientSecret + "&scope=/read-public&grant_type=client_credentials";
|
||||
HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
|
||||
httpPost.addHeader("Accept", "application/json");
|
||||
httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");
|
||||
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpResponse getResponse = httpClient.execute(httpPost);
|
||||
|
||||
InputStream is = getResponse.getEntity().getContent();
|
||||
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
|
||||
|
||||
JSONObject responseObject = null;
|
||||
String inputStr;
|
||||
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
|
||||
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
|
||||
try {
|
||||
responseObject = new JSONObject(inputStr);
|
||||
} catch (Exception e) {
|
||||
//Not as valid as I'd hoped, move along
|
||||
responseObject = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (responseObject != null && responseObject.has("access_token")) {
|
||||
accessToken = (String) responseObject.get("access_token");
|
||||
}
|
||||
}
|
||||
}
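For orientation, the request built by init() is a plain OAuth2 client-credentials call; with ORCID's public token endpoint (an assumption here, since OAUTHUrl, clientId and clientSecret are injected through Spring configuration) the exchange looks roughly like this:

// Assumed endpoint; the real value comes from the Orcidv2 bean configuration.
String OAUTHUrl = "https://orcid.org/oauth/token";
String authenticationParameters = "?client_id=" + clientId
        + "&client_secret=" + clientSecret
        + "&scope=/read-public&grant_type=client_credentials";
// Expected response body (single-line JSON) from which init() extracts "access_token", e.g.:
// {"access_token":"...","token_type":"bearer","expires_in":631138518,"scope":"/read-public"}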
|
||||
|
||||
/**
|
||||
* Makes an instance of the Orcidv2 class based on the provided parameters.
|
||||
* This constructor is called through the spring bean initialization
|
||||
*/
|
||||
private Orcidv2(String url, String OAUTHUrl, String clientId, String clientSecret) {
|
||||
this.restConnector = new RESTConnector(url);
|
||||
this.OAUTHUrl = OAUTHUrl;
|
||||
this.clientId = clientId;
|
||||
this.clientSecret = clientSecret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes an instance of the Orcidv2 class based on the provided parameters.
|
||||
* This constructor is called through the spring bean initialization
|
||||
*/
|
||||
private Orcidv2(String url) {
|
||||
this.restConnector = new RESTConnector(url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Queries ORCID for persons matching the given search string and converts each result to an AuthorityValue.
|
||||
* @param text search string
|
||||
* @return List<AuthorityValue>
|
||||
*/
|
||||
@Override
|
||||
public List<AuthorityValue> queryAuthorities(String text, int max) {
|
||||
List<Person> bios = queryBio(text, max);
|
||||
List<AuthorityValue> result = new ArrayList<>();
|
||||
for (Person person : bios) {
|
||||
AuthorityValue orcidAuthorityValue = Orcidv2AuthorityValue.create(person);
|
||||
if (orcidAuthorityValue != null) {
|
||||
result.add(orcidAuthorityValue);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
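A short usage sketch: the bean is normally obtained from the DSpace service manager under the name "AuthoritySource" (the same name used in Orcidv2AuthorityValue.newInstance() below); the search term is only an example:

// Illustrative lookup; requires org.dspace.utils.DSpace and a configured "AuthoritySource" bean.
Orcidv2 orcidSource = new DSpace().getServiceManager()
        .getServiceByName("AuthoritySource", Orcidv2.class);
List<AuthorityValue> matches = orcidSource.queryAuthorities("Carberry", 10);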
|
||||
|
||||
/**
|
||||
* Create an AuthorityValue from a Person retrieved using the given orcid identifier.
|
||||
* @param id orcid identifier
|
||||
* @return AuthorityValue
|
||||
*/
|
||||
public AuthorityValue queryAuthorityID(String id) {
|
||||
Person person = getBio(id);
|
||||
AuthorityValue valueFromPerson = Orcidv2AuthorityValue.create(person);
|
||||
return valueFromPerson;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a Person object based on a given orcid identifier
|
||||
* @param id orcid identifier
|
||||
* @return Person
|
||||
*/
|
||||
public Person getBio(String id) {
|
||||
log.debug("getBio called with ID=" + id);
|
||||
if(!isValid(id)){
|
||||
return null;
|
||||
}
|
||||
InputStream bioDocument = restConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
Person person = converter.convertSinglePerson(bioDocument);
|
||||
return person;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieve a list of Person objects.
|
||||
* @param text search string
|
||||
* @param start offset to use
|
||||
* @param rows how many rows to return
|
||||
* @return List<Person>
|
||||
*/
|
||||
public List<Person> queryBio(String text, int start, int rows) {
|
||||
if (rows > 100) {
|
||||
throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
|
||||
}
|
||||
|
||||
String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
|
||||
log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
|
||||
InputStream bioDocument = restConnector.get(searchPath, accessToken);
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
List<Person> bios = converter.convert(bioDocument);
|
||||
return bios;
|
||||
}
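As a concrete illustration of the path built above (the query values are made up), note that URLEncoder.encode renders spaces as '+':

// Hypothetical query: text = "Jonas Van Goolen", start = 0, rows = 10
String searchPath = "search?q=" + URLEncoder.encode("Jonas Van Goolen") + "&start=" + 0 + "&rows=" + 10;
// searchPath -> "search?q=Jonas+Van+Goolen&start=0&rows=10"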
|
||||
|
||||
/**
|
||||
* Retrieve a list of Person objects.
|
||||
* @param text search string
|
||||
* @param max how many rows to return
|
||||
* @return List<Person>
|
||||
*/
|
||||
public List<Person> queryBio(String text, int max) {
|
||||
return queryBio(text, 0, max);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check to see if the provided text has the correct ORCID syntax.
|
||||
* Since lookups are only possible by ORCID iD, this filters out any query that would return an empty result anyway.
|
||||
*/
|
||||
private boolean isValid(String text) {
|
||||
return StringUtils.isNotBlank(text) && text.matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX);
|
||||
}
|
||||
}
|
@@ -0,0 +1,330 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.AuthorityValueServiceImpl;
|
||||
import org.dspace.authority.PersonAuthorityValue;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.orcid.jaxb.model.common_v2.ExternalId;
|
||||
import org.orcid.jaxb.model.record_v2.*;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* @author Jonas Van Goolen (jonas at atmire dot com)
|
||||
*/
|
||||
public class Orcidv2AuthorityValue extends PersonAuthorityValue {
|
||||
|
||||
/*
|
||||
* The ORCID identifier
|
||||
*/
|
||||
private String orcid_id;
|
||||
|
||||
/*
|
||||
* Map containing key-value pairs filled in by "setValues(Person person)".
|
||||
* This represents all dynamic information of the object.
|
||||
*/
|
||||
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
|
||||
|
||||
/**
|
||||
* The syntax that the ORCID id needs to conform to
|
||||
*/
|
||||
public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";
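A quick illustration of what this pattern accepts; the iDs below are only examples:

"0000-0002-1825-0097".matches(ORCID_ID_SYNTAX) // true  - four groups of four digits
"0000-0002-1694-233X".matches(ORCID_ID_SYNTAX) // true  - the last group may end in 'X'
"0000-0002-1825".matches(ORCID_ID_SYNTAX)      // false - too short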
|
||||
|
||||
|
||||
/**
|
||||
* Creates an instance of Orcidv2AuthorityValue with only uninitialized fields.
|
||||
* This is meant to be filled in with values from an existing record.
|
||||
* To create a brand new Orcidv2AuthorityValue, use create()
|
||||
*/
|
||||
public Orcidv2AuthorityValue() {
|
||||
}
|
||||
|
||||
public Orcidv2AuthorityValue(SolrDocument document) {
|
||||
super(document);
|
||||
}
|
||||
|
||||
|
||||
public String getOrcid_id() {
|
||||
return orcid_id;
|
||||
}
|
||||
|
||||
public void setOrcid_id(String orcid_id) {
|
||||
this.orcid_id = orcid_id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an empty authority.
|
||||
* @return Orcidv2AuthorityValue
|
||||
*/
|
||||
public static Orcidv2AuthorityValue create() {
|
||||
Orcidv2AuthorityValue orcidAuthorityValue = new Orcidv2AuthorityValue();
|
||||
orcidAuthorityValue.setId(UUID.randomUUID().toString());
|
||||
orcidAuthorityValue.updateLastModifiedDate();
|
||||
orcidAuthorityValue.setCreationDate(new Date());
|
||||
return orcidAuthorityValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an authority based on a given ORCID Person.
|
||||
* @return Orcidv2AuthorityValue, or null if the given person is null
|
||||
*/
|
||||
public static Orcidv2AuthorityValue create(Person person) {
|
||||
if (person == null) {
|
||||
return null;
|
||||
}
|
||||
Orcidv2AuthorityValue authority = Orcidv2AuthorityValue.create();
|
||||
|
||||
authority.setValues(person);
|
||||
|
||||
return authority;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize this instance based on a Person object
|
||||
* @param person Person
|
||||
*/
|
||||
protected void setValues(Person person) {
|
||||
NameType name = person.getName();
|
||||
|
||||
if (!StringUtils.equals(name.getPath(), this.getOrcid_id())) {
|
||||
this.setOrcid_id(name.getPath());
|
||||
}
|
||||
|
||||
if (!StringUtils.equals(name.getFamilyName().getValue(), this.getLastName())) {
|
||||
this.setLastName(name.getFamilyName().getValue());
|
||||
}
|
||||
|
||||
if (!StringUtils.equals(name.getGivenNames().getValue(), this.getFirstName())) {
|
||||
this.setFirstName(name.getGivenNames().getValue());
|
||||
}
|
||||
|
||||
if (name.getCreditName() != null && StringUtils.isNotBlank(name.getCreditName().getValue())) {
|
||||
if (!this.getNameVariants().contains(name.getCreditName())) {
|
||||
this.addNameVariant(name.getCreditName().getValue());
|
||||
}
|
||||
}
|
||||
|
||||
if (person.getKeywords() != null) {
|
||||
for (KeywordType keyword : person.getKeywords().getKeyword()) {
|
||||
if (this.isNewMetadata("keyword", keyword.getContent())) {
|
||||
this.addOtherMetadata("keyword", keyword.getContent());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ExternalIdentifiers externalIdentifiers = person.getExternalIdentifiers();
|
||||
if (externalIdentifiers != null) {
|
||||
for (ExternalId externalIdentifier : externalIdentifiers.getExternalIdentifier()) {
|
||||
if (this.isNewMetadata("external_identifier", externalIdentifier.getExternalIdValue())) {
|
||||
this.addOtherMetadata("external_identifier", externalIdentifier.getExternalIdValue());
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
if (person.getResearcherUrls() != null) {
|
||||
for (ResearcherUrlType researcherUrl : person.getResearcherUrls().getResearcherUrl()) {
|
||||
if (this.isNewMetadata("researcher_url", researcherUrl.getUrl().getValue())) {
|
||||
this.addOtherMetadata("researcher_url", researcherUrl.getUrl().getValue());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if (person.getBiography() != null) {
|
||||
if (this.isNewMetadata("biography", person.getBiography().getContent())) {
|
||||
this.addOtherMetadata("biography", person.getBiography().getContent());
|
||||
}
|
||||
}
|
||||
|
||||
this.setValue(this.getName());
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes an instance of the AuthorityValue with the given information.
|
||||
* @param info string info
|
||||
* @return AuthorityValue
|
||||
*/
|
||||
@Override
|
||||
public AuthorityValue newInstance(String info) {
|
||||
AuthorityValue authorityValue = null;
|
||||
if (StringUtils.isNotBlank(info)) {
|
||||
Orcidv2 orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
|
||||
authorityValue = orcid.queryAuthorityID(info);
|
||||
} else {
|
||||
authorityValue = this.create();
|
||||
}
|
||||
return authorityValue;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setValue(String value) {
|
||||
super.setValue(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check to see if the provided label / data pair is already present in the "otherMetadata" or not
|
||||
*/
|
||||
public boolean isNewMetadata(String label, String data) {
|
||||
List<String> strings = getOtherMetadata().get(label);
|
||||
boolean update;
|
||||
if (strings == null) {
|
||||
update = StringUtils.isNotBlank(data);
|
||||
} else {
|
||||
update = !strings.contains(data);
|
||||
}
|
||||
return update;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add additional metadata to the otherMetadata map.
*/
|
||||
public void addOtherMetadata(String label, String data) {
|
||||
List<String> strings = otherMetadata.get(label);
|
||||
if (strings == null) {
|
||||
strings = new ArrayList<>();
|
||||
}
|
||||
strings.add(data);
|
||||
otherMetadata.put(label, strings);
|
||||
}
|
||||
|
||||
public Map<String, List<String>> getOtherMetadata() {
|
||||
return otherMetadata;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate a solr record from this instance
|
||||
* @return SolrInputDocument
|
||||
*/
|
||||
@Override
|
||||
public SolrInputDocument getSolrInputDocument() {
|
||||
SolrInputDocument doc = super.getSolrInputDocument();
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
doc.addField("orcid_id", getOrcid_id());
|
||||
}
|
||||
|
||||
for (String t : otherMetadata.keySet()) {
|
||||
List<String> data = otherMetadata.get(t);
|
||||
for (String data_entry : data) {
|
||||
doc.addField("label_" + t, data_entry);
|
||||
}
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Information that can be used in the choice UI.
|
||||
* @return map
|
||||
*/
|
||||
@Override
|
||||
public Map<String, String> choiceSelectMap() {
|
||||
|
||||
Map<String, String> map = super.choiceSelectMap();
|
||||
|
||||
String orcid_id = getOrcid_id();
|
||||
if (StringUtils.isNotBlank(orcid_id)) {
|
||||
map.put("orcid", orcid_id);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAuthorityType() {
|
||||
return "orcid";
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a string that allows this AuthorityType to be recognized and supplies the information needed to create a new instance through newInstance(String info).
|
||||
* @return see {@link org.dspace.authority.service.AuthorityValueService#GENERATE AuthorityValueService.GENERATE}
|
||||
*/
|
||||
@Override
|
||||
public String generateString() {
|
||||
String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() + AuthorityValueServiceImpl.SPLIT;
|
||||
if (StringUtils.isNotBlank(getOrcid_id())) {
|
||||
generateString += getOrcid_id();
|
||||
}
|
||||
return generateString;
|
||||
}
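Assuming GENERATE and SPLIT keep their usual values in AuthorityValueServiceImpl ("will be generated::" and "::" respectively — an assumption, since those constants are not shown in this change), the generated string for an ORCID authority would look like this:

// Hypothetical result for orcid_id "0000-0002-1825-0097":
// generateString() -> "will be generated::orcid::0000-0002-1825-0097"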
|
||||
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
|
||||
|
||||
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return orcid_id != null ? orcid_id.hashCode() : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* The regular equals() only checks if both AuthorityValues describe the same authority.
|
||||
* This method checks whether the AuthorityValues contain different information.
|
||||
* E.g. it is used to decide when lastModified should be updated.
|
||||
* @param o object
|
||||
* @return true or false
|
||||
*/
|
||||
@Override
|
||||
public boolean hasTheSameInformationAs(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
if (!super.hasTheSameInformationAs(o)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
|
||||
|
||||
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (String key : otherMetadata.keySet()) {
|
||||
if (otherMetadata.get(key) != null) {
|
||||
List<String> metadata = otherMetadata.get(key);
|
||||
List<String> otherMetadata = that.otherMetadata.get(key);
|
||||
if (otherMetadata == null) {
|
||||
return false;
|
||||
} else {
|
||||
HashSet<String> metadataSet = new HashSet<String>(metadata);
|
||||
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
|
||||
if (!metadataSet.equals(otherMetadataSet)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (that.otherMetadata.get(key) != null) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
@@ -1,113 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Bio {
|
||||
|
||||
protected String orcid;
|
||||
|
||||
protected BioName name;
|
||||
|
||||
protected String country;
|
||||
|
||||
protected Set<String> keywords;
|
||||
|
||||
protected Set<BioExternalIdentifier> bioExternalIdentifiers;
|
||||
|
||||
protected Set<BioResearcherUrl> researcherUrls;
|
||||
|
||||
protected String biography;
|
||||
|
||||
public Bio() {
|
||||
this.name = new BioName();
|
||||
keywords = new LinkedHashSet<String>();
|
||||
bioExternalIdentifiers = new LinkedHashSet<BioExternalIdentifier>();
|
||||
researcherUrls = new LinkedHashSet<BioResearcherUrl>();
|
||||
}
|
||||
|
||||
public String getOrcid() {
|
||||
return orcid;
|
||||
}
|
||||
|
||||
public void setOrcid(String orcid) {
|
||||
this.orcid = orcid;
|
||||
}
|
||||
|
||||
public BioName getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(BioName name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getCountry() {
|
||||
return country;
|
||||
}
|
||||
|
||||
public void setCountry(String country) {
|
||||
this.country = country;
|
||||
}
|
||||
|
||||
public Set<String> getKeywords() {
|
||||
return keywords;
|
||||
}
|
||||
|
||||
public void addKeyword(String keyword) {
|
||||
this.keywords.add(keyword);
|
||||
}
|
||||
|
||||
public Set<BioExternalIdentifier> getBioExternalIdentifiers() {
|
||||
return bioExternalIdentifiers;
|
||||
}
|
||||
|
||||
public void addExternalIdentifier(BioExternalIdentifier externalReference) {
|
||||
bioExternalIdentifiers.add(externalReference);
|
||||
}
|
||||
|
||||
public Set<BioResearcherUrl> getResearcherUrls() {
|
||||
return researcherUrls;
|
||||
}
|
||||
|
||||
public void addResearcherUrl(BioResearcherUrl researcherUrl) {
|
||||
researcherUrls.add(researcherUrl);
|
||||
}
|
||||
|
||||
public String getBiography() {
|
||||
return biography;
|
||||
}
|
||||
|
||||
public void setBiography(String biography) {
|
||||
this.biography = biography;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Bio{" +
|
||||
"orcid='" + orcid + '\'' +
|
||||
", name=" + name +
|
||||
", country='" + country + '\'' +
|
||||
", keywords=" + keywords +
|
||||
", bioExternalIdentifiers=" + bioExternalIdentifiers +
|
||||
", researcherUrls=" + researcherUrls +
|
||||
", biography='" + biography + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
@@ -1,109 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class BioExternalIdentifier {
|
||||
|
||||
|
||||
protected String id_orcid;
|
||||
protected String id_common_name;
|
||||
protected String id_reference;
|
||||
protected String id_url;
|
||||
|
||||
public BioExternalIdentifier(String id_orcid, String id_common_name, String id_reference, String id_url) {
|
||||
this.id_orcid = id_orcid;
|
||||
this.id_common_name = id_common_name;
|
||||
this.id_reference = id_reference;
|
||||
this.id_url = id_url;
|
||||
}
|
||||
|
||||
public String getId_orcid() {
|
||||
return id_orcid;
|
||||
}
|
||||
|
||||
public void setId_orcid(String id_orcid) {
|
||||
this.id_orcid = id_orcid;
|
||||
}
|
||||
|
||||
public String getId_common_name() {
|
||||
return id_common_name;
|
||||
}
|
||||
|
||||
public void setId_common_name(String id_common_name) {
|
||||
this.id_common_name = id_common_name;
|
||||
}
|
||||
|
||||
public String getId_reference() {
|
||||
return id_reference;
|
||||
}
|
||||
|
||||
public void setId_reference(String id_reference) {
|
||||
this.id_reference = id_reference;
|
||||
}
|
||||
|
||||
public String getId_url() {
|
||||
return id_url;
|
||||
}
|
||||
|
||||
public void setId_url(String id_url) {
|
||||
this.id_url = id_url;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BioExternalIdentifier{" +
|
||||
"id_orcid='" + id_orcid + '\'' +
|
||||
", id_common_name='" + id_common_name + '\'' +
|
||||
", id_reference='" + id_reference + '\'' +
|
||||
", id_url='" + id_url + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BioExternalIdentifier that = (BioExternalIdentifier) o;
|
||||
|
||||
if (id_common_name != null ? !id_common_name.equals(that.id_common_name) : that.id_common_name != null) {
|
||||
return false;
|
||||
}
|
||||
if (id_orcid != null ? !id_orcid.equals(that.id_orcid) : that.id_orcid != null) {
|
||||
return false;
|
||||
}
|
||||
if (id_reference != null ? !id_reference.equals(that.id_reference) : that.id_reference != null) {
|
||||
return false;
|
||||
}
|
||||
if (id_url != null ? !id_url.equals(that.id_url) : that.id_url != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = id_orcid != null ? id_orcid.hashCode() : 0;
|
||||
result = 31 * result + (id_common_name != null ? id_common_name.hashCode() : 0);
|
||||
result = 31 * result + (id_reference != null ? id_reference.hashCode() : 0);
|
||||
result = 31 * result + (id_url != null ? id_url.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,115 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class BioName {
|
||||
|
||||
protected String givenNames;
|
||||
protected String familyName;
|
||||
protected String creditName;
|
||||
protected List<String> otherNames;
|
||||
|
||||
BioName() {
|
||||
otherNames = new ArrayList<String>();
|
||||
}
|
||||
|
||||
BioName(String givenNames, String familyName, String creditName, List<String> otherNames) {
|
||||
this.givenNames = givenNames;
|
||||
this.familyName = familyName;
|
||||
this.creditName = creditName;
|
||||
this.otherNames = otherNames;
|
||||
}
|
||||
|
||||
public String getGivenNames() {
|
||||
return givenNames;
|
||||
}
|
||||
|
||||
public void setGivenNames(String givenNames) {
|
||||
this.givenNames = givenNames;
|
||||
}
|
||||
|
||||
public String getFamilyName() {
|
||||
return familyName;
|
||||
}
|
||||
|
||||
public void setFamilyName(String familyName) {
|
||||
this.familyName = familyName;
|
||||
}
|
||||
|
||||
public String getCreditName() {
|
||||
return creditName;
|
||||
}
|
||||
|
||||
public void setCreditName(String creditName) {
|
||||
this.creditName = creditName;
|
||||
}
|
||||
|
||||
public List<String> getOtherNames() {
|
||||
return otherNames;
|
||||
}
|
||||
|
||||
public void setOtherNames(List<String> otherNames) {
|
||||
this.otherNames = otherNames;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BioName{" +
|
||||
"givenNames='" + givenNames + '\'' +
|
||||
", familyName='" + familyName + '\'' +
|
||||
", creditName='" + creditName + '\'' +
|
||||
", otherNames=" + otherNames +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BioName bioName = (BioName) o;
|
||||
|
||||
if (creditName != null ? !creditName.equals(bioName.creditName) : bioName.creditName != null) {
|
||||
return false;
|
||||
}
|
||||
if (familyName != null ? !familyName.equals(bioName.familyName) : bioName.familyName != null) {
|
||||
return false;
|
||||
}
|
||||
if (givenNames != null ? !givenNames.equals(bioName.givenNames) : bioName.givenNames != null) {
|
||||
return false;
|
||||
}
|
||||
if (otherNames != null ? !otherNames.equals(bioName.otherNames) : bioName.otherNames != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = givenNames != null ? givenNames.hashCode() : 0;
|
||||
result = 31 * result + (familyName != null ? familyName.hashCode() : 0);
|
||||
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
|
||||
result = 31 * result + (otherNames != null ? otherNames.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,78 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class BioResearcherUrl {
|
||||
|
||||
protected String name;
|
||||
protected String url;
|
||||
|
||||
public BioResearcherUrl(String name, String url) {
|
||||
this.name = name;
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BioResearcherUrl{" +
|
||||
"name='" + name + '\'' +
|
||||
", url='" + url + '\'' +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BioResearcherUrl that = (BioResearcherUrl) o;
|
||||
|
||||
if (name != null ? !name.equals(that.name) : that.name != null) {
|
||||
return false;
|
||||
}
|
||||
if (url != null ? !url.equals(that.url) : that.url != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = name != null ? name.hashCode() : 0;
|
||||
result = 31 * result + (url != null ? url.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,50 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Citation {
|
||||
|
||||
private CitationType type;
|
||||
private String citation;
|
||||
|
||||
public Citation(CitationType type, String citation) {
|
||||
this.type = type;
|
||||
this.citation = citation;
|
||||
}
|
||||
|
||||
public CitationType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public void setType(CitationType type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public String getCitation() {
|
||||
return citation;
|
||||
}
|
||||
|
||||
public void setCitation(String citation) {
|
||||
this.citation = citation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Citation{" +
|
||||
"type=" + type +
|
||||
", citation='" + citation + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
@@ -1,29 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public enum CitationType {
|
||||
|
||||
FORMATTED_UNSPECIFIED,
|
||||
BIBTEX,
|
||||
FORMATTED_APA,
|
||||
FORMATTED_HARVARD,
|
||||
FORMATTED_IEEE,
|
||||
FORMATTED_MLA,
|
||||
FORMATTED_VANCOUVER,
|
||||
FORMATTED_CHICAGO
|
||||
|
||||
}
|
@@ -1,111 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.authority.orcid.model;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class Contributor {
|
||||
|
||||
private String orcid;
|
||||
private String creditName;
|
||||
private String email;
|
||||
private Set<ContributorAttribute> contributorAttributes;
|
||||
|
||||
public Contributor(String orcid, String creditName, String email, Set<ContributorAttribute> contributorAttributes) {
|
||||
this.orcid = orcid;
|
||||
this.creditName = creditName;
|
||||
this.email = email;
|
||||
this.contributorAttributes = contributorAttributes;
|
||||
}
|
||||
|
||||
public String getOrcid() {
|
||||
return orcid;
|
||||
}
|
||||
|
||||
public void setOrcid(String orcid) {
|
||||
this.orcid = orcid;
|
||||
}
|
||||
|
||||
public String getCreditName() {
|
||||
return creditName;
|
||||
}
|
||||
|
||||
public void setCreditName(String creditName) {
|
||||
this.creditName = creditName;
|
||||
}
|
||||
|
||||
public String getEmail() {
|
||||
return email;
|
||||
}
|
||||
|
||||
public void setEmail(String email) {
|
||||
this.email = email;
|
||||
}
|
||||
|
||||
public Set<ContributorAttribute> getContributorAttributes() {
|
||||
return contributorAttributes;
|
||||
}
|
||||
|
||||
public void setContributorAttributes(Set<ContributorAttribute> contributorAttributes) {
|
||||
this.contributorAttributes = contributorAttributes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Contributor{" +
|
||||
"orcid='" + orcid + '\'' +
|
||||
", creditName='" + creditName + '\'' +
|
||||
", email='" + email + '\'' +
|
||||
", contributorAttributes=" + contributorAttributes +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Contributor that = (Contributor) o;
|
||||
|
||||
if (contributorAttributes != null ? !contributorAttributes.equals(that.contributorAttributes) : that.contributorAttributes != null) {
|
||||
return false;
|
||||
}
|
||||
if (creditName != null ? !creditName.equals(that.creditName) : that.creditName != null) {
|
||||
return false;
|
||||
}
|
||||
if (email != null ? !email.equals(that.email) : that.email != null) {
|
||||
return false;
|
||||
}
|
||||
if (orcid != null ? !orcid.equals(that.orcid) : that.orcid != null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = orcid != null ? orcid.hashCode() : 0;
|
||||
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
|
||||
result = 31 * result + (email != null ? email.hashCode() : 0);
|
||||
result = 31 * result + (contributorAttributes != null ? contributorAttributes.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
@@ -1,79 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

/**
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class ContributorAttribute {

    private ContributorAttributeRole role;
    private ContributorAttributeSequence sequence;

    public ContributorAttribute(ContributorAttributeRole role, ContributorAttributeSequence sequence) {
        this.role = role;
        this.sequence = sequence;
    }

    public ContributorAttributeRole getRole() {
        return role;
    }

    public void setRole(ContributorAttributeRole role) {
        this.role = role;
    }

    public ContributorAttributeSequence getSequence() {
        return sequence;
    }

    public void setSequence(ContributorAttributeSequence sequence) {
        this.sequence = sequence;
    }

    @Override
    public String toString() {
        return "ContributorAttribute{" +
                "role=" + role +
                ", sequence=" + sequence +
                '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        ContributorAttribute that = (ContributorAttribute) o;

        if (role != that.role) {
            return false;
        }
        if (sequence != that.sequence) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        int result = role != null ? role.hashCode() : 0;
        result = 31 * result + (sequence != null ? sequence.hashCode() : 0);
        return result;
    }
}
@@ -1,32 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

/**
 * http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum ContributorAttributeRole {

    AUTHOR,
    ASSIGNEE,
    EDITOR,
    CHAIR_OR_TRANSLATOR,
    CO_INVESTIGATOR,
    CO_INVENTOR,
    GRADUATE_STUDENT,
    OTHER_INVENTOR,
    PRINCIPAL_INVESTIGATOR,
    POSTDOCTORAL_RESEARCHER,
    SUPPORT_STAFF

}
@@ -1,23 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

/**
 * http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum ContributorAttributeSequence {

    FIRST,
    ADDITIONAL

}
@@ -1,117 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

import java.util.Set;

/**
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class Work {

    private WorkTitle workTitle;
    private String description;
    private Citation citation;
    private WorkType workType;
    private String publicationDate;
    private WorkExternalIdentifier workExternalIdentifier;
    private String url;
    private Set<Contributor> contributors;
    private String workSource;

    public WorkTitle getWorkTitle() {
        return workTitle;
    }

    public void setWorkTitle(WorkTitle workTitle) {
        this.workTitle = workTitle;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Citation getCitation() {
        return citation;
    }

    public void setCitation(Citation citation) {
        this.citation = citation;
    }

    public WorkType getWorkType() {
        return workType;
    }

    public void setWorkType(WorkType workType) {
        this.workType = workType;
    }

    public String getPublicationDate() {
        return publicationDate;
    }

    public void setPublicationDate(String publicationDate) {
        this.publicationDate = publicationDate;
    }

    public WorkExternalIdentifier getWorkExternalIdentifier() {
        return workExternalIdentifier;
    }

    public void setWorkExternalIdentifier(WorkExternalIdentifier workExternalIdentifier) {
        this.workExternalIdentifier = workExternalIdentifier;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public Set<Contributor> getContributors() {
        return contributors;
    }

    public void setContributors(Set<Contributor> contributors) {
        this.contributors = contributors;
    }

    public String getWorkSource() {
        return workSource;
    }

    public void setWorkSource(String workSource) {
        this.workSource = workSource;
    }

    @Override
    public String toString() {
        return "Work{" +
                "workTitle=" + workTitle +
                ", description='" + description + '\'' +
                ", citation=" + citation +
                ", workType=" + workType +
                ", publicationDate='" + publicationDate + '\'' +
                ", workExternalIdentifier=" + workExternalIdentifier +
                ", url='" + url + '\'' +
                ", contributors=" + contributors +
                ", workSource='" + workSource + '\'' +
                '}';
    }
}
@@ -1,71 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

/**
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class WorkExternalIdentifier {

    private WorkExternalIdentifierType workExternalIdentifierType;
    private String workExternalIdenfitierID;

    public WorkExternalIdentifier(WorkExternalIdentifierType workExternalIdentifierType, String workExternalIdenfitierID) {
        this.workExternalIdentifierType = workExternalIdentifierType;
        this.workExternalIdenfitierID = workExternalIdenfitierID;
    }

    public WorkExternalIdentifierType getWorkExternalIdentifierType() {
        return workExternalIdentifierType;
    }

    public void setWorkExternalIdentifierType(WorkExternalIdentifierType workExternalIdentifierType) {
        this.workExternalIdentifierType = workExternalIdentifierType;
    }

    @Override
    public String toString() {
        return "WorkExternalIdentifier{" +
                "workExternalIdentifierType=" + workExternalIdentifierType +
                ", workExternalIdenfitierID='" + workExternalIdenfitierID + '\'' +
                '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        WorkExternalIdentifier that = (WorkExternalIdentifier) o;

        if (workExternalIdenfitierID != null ? !workExternalIdenfitierID.equals(that.workExternalIdenfitierID) : that.workExternalIdenfitierID != null) {
            return false;
        }
        if (workExternalIdentifierType != that.workExternalIdentifierType) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        int result = workExternalIdentifierType != null ? workExternalIdentifierType.hashCode() : 0;
        result = 31 * result + (workExternalIdenfitierID != null ? workExternalIdenfitierID.hashCode() : 0);
        return result;
    }
}
@@ -1,42 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

/**
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum WorkExternalIdentifierType {

    // OTHER_ID,
    ARXIV,
    ASIN,
    ASIN_TLD,
    BIBCODE,
    DOI,
    EID,
    ISBN,
    ISSN,
    JFM,
    JSTOR,
    LCCN,
    MR,
    OCLC,
    OL,
    OSTI,
    PMC,
    PMID,
    RFC,
    SSRN,
    ZBL

}
@@ -1,64 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

import java.util.Map;

/**
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class WorkTitle {

    private String title;
    private String subtitle;
    private Map<String, String> translatedTitles;

    public WorkTitle(String title, String subtitle, Map<String, String> translatedTitles) {
        this.title = title;
        this.subtitle = subtitle;
        this.translatedTitles = translatedTitles;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getSubtitle() {
        return subtitle;
    }

    public void setSubtitle(String subtitle) {
        this.subtitle = subtitle;
    }

    public String getTranslatedTitles(String languageCode) {
        return translatedTitles.get(languageCode);
    }

    public void setTranslatedTitle(String languageCode, String translatedTitle) {
        translatedTitles.put(languageCode, translatedTitle);
    }

    @Override
    public String toString() {
        return "WorkTitle{" +
                "title='" + title + '\'' +
                ", subtitle='" + subtitle + '\'' +
                ", translatedTitles=" + translatedTitles +
                '}';
    }
}
@@ -1,57 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.model;

/**
 * http://support.orcid.org/knowledgebase/articles/118795
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum WorkType {

    BOOK,
    BOOK_CHAPTER,
    BOOK_REVIEW,
    DICTIONARY_ENTRY,
    DISSERTATION,
    ENCYCLOPEDIA_ARTICLE,
    EDITED_BOOK,
    JOURNAL_ARTICLE,
    JOURNAL_ISSUE,
    MAGAZINE_ARTICLE,
    MANUAL,
    ONLINE_RESOURCE,
    NEWSLETTER_ARTICLE,
    NEWSPAPER_ARTICLE,
    REPORT,
    RESEARCH_TOOL,
    SUPERVISED_STUDENT_PUBLICATION,
    TEST,
    TRANSLATION,
    WEBSITE,
    CONFERENCE_ABSTRACT,
    CONFERENCE_PAPER,
    CONFERENCE_POSTER,
    DISCLOSURE,
    LICENSE,
    PATENT,
    REGISTERED_COPYRIGHT,
    ARTISTIC_PERFORMANCE,
    DATA_SET,
    INVENTION,
    LECTURE_SPEECH,
    RESEARCH_TECHNIQUE,
    SPIN_OFF_COMPANY,
    STANDARDS_AND_POLICY,
    TECHNICAL_STANDARD,
    OTHER

}
@@ -8,7 +8,13 @@
package org.dspace.authority.orcid.xml;

import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import java.io.InputStream;
import java.net.URISyntaxException;

/**
 *
@@ -25,11 +31,15 @@ public abstract class Converter<T> {
 */
    private static Logger log = Logger.getLogger(Converter.class);

    public abstract T convert(InputStream document);

    protected void processError(Document xml) {
        String errorMessage = XMLErrors.getErrorMessage(xml);
        log.error("The orcid-message reports an error: " + errorMessage);
    protected Object unmarshall(InputStream input, Class<?> type) throws SAXException, URISyntaxException {
        try {
            JAXBContext context = JAXBContext.newInstance(type);
            Unmarshaller unmarshaller = context.createUnmarshaller();
            return unmarshaller.unmarshal(input);
        } catch (JAXBException e) {
            throw new RuntimeException("Unable to unmarshall orcid message" + e);
        }
    }

    public abstract T convert(Document document);
}
@@ -1,73 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authority.orcid.xml;

import org.dspace.authority.util.XMLUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;

import javax.xml.xpath.XPathExpressionException;

/**
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class XMLErrors {

    /**
     * log4j logger
     */
    private static Logger log = Logger.getLogger(XMLErrors.class);

    private static final String ERROR_DESC = "/orcid-message/error-desc";

    /**
     * Evaluates whether a given xml document contains errors or not.
     *
     * @param xml The given xml document
     * @return true if the given xml document is null
     * or if it contains errors
     */
    public static boolean check(Document xml) {

        if (xml == null) {
            return true;
        }

        String textContent = null;

        try {
            textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
        } catch (XPathExpressionException e) {
            log.error("Error while checking for errors in orcid message", e);
        }

        return textContent == null;
    }

    public static String getErrorMessage(Document xml) {

        if (xml == null) {
            return "Did not receive an XML document.";
        }

        String textContent = null;

        try {
            textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
        } catch (XPathExpressionException e) {
            log.error("Error while checking for errors in orcid message", e);
        }

        return textContent;
    }

}
@@ -7,23 +7,22 @@
|
||||
*/
|
||||
package org.dspace.authority.orcid.xml;
|
||||
|
||||
import org.dspace.authority.orcid.model.Bio;
|
||||
import org.dspace.authority.orcid.model.BioExternalIdentifier;
|
||||
import org.dspace.authority.orcid.model.BioName;
|
||||
import org.dspace.authority.orcid.model.BioResearcherUrl;
|
||||
import org.dspace.authority.util.XMLUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
import org.w3c.dom.NodeList;
|
||||
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.authority.orcid.Orcidv2;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.orcid.jaxb.model.common_v2.OrcidId;
|
||||
import org.orcid.jaxb.model.record_v2.Person;
|
||||
import org.orcid.jaxb.model.search_v2.Result;
|
||||
import org.orcid.jaxb.model.search_v2.Search;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
@@ -36,218 +35,40 @@ public class XMLtoBio extends Converter {
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(XMLtoBio.class);
|
||||
|
||||
/**
|
||||
* orcid-message XPATHs
|
||||
*/
|
||||
|
||||
protected String ORCID_BIO = "//orcid-bio";
|
||||
|
||||
// protected String ORCID = "parent::*/orcid";
|
||||
protected String ORCID = "parent::*/orcid-identifier/path";
|
||||
|
||||
protected String PERSONAL_DETAILS = "personal-details";
|
||||
protected String GIVEN_NAMES = PERSONAL_DETAILS + "/given-names";
|
||||
protected String FAMILY_NAME = PERSONAL_DETAILS + "/family-name";
|
||||
protected String CREDIT_NAME = PERSONAL_DETAILS + "/credit-name";
|
||||
protected String OTHER_NAMES = PERSONAL_DETAILS + "/other-names";
|
||||
protected String OTHER_NAME = OTHER_NAMES + "/other-name";
|
||||
|
||||
protected String CONTACT_DETAILS = "contact-details";
|
||||
protected String COUNTRY = CONTACT_DETAILS + "/address/country";
|
||||
|
||||
protected String KEYWORDS = "keywords";
|
||||
protected String KEYWORD = KEYWORDS + "/keyword";
|
||||
|
||||
protected String EXTERNAL_IDENTIFIERS = "external-identifiers";
|
||||
protected String EXTERNAL_IDENTIFIER = EXTERNAL_IDENTIFIERS + "/external-identifier";
|
||||
protected String EXTERNAL_ID_ORCID = "external-id-orcid";
|
||||
protected String EXTERNAL_ID_COMMNON_NAME = "external-id-common-name";
|
||||
protected String EXTERNAL_ID_REFERENCE = "external-id-reference";
|
||||
protected String EXTERNAL_ID_URL = "external-id-url";
|
||||
|
||||
protected String RESEARCHER_URLS = "researcher-urls";
|
||||
protected String RESEARCHER_URL = "researcher-urls/researcher-url";
|
||||
protected String URL_NAME = "url-name";
|
||||
protected String URL = "url";
|
||||
|
||||
protected String BIOGRAPHY = ORCID_BIO + "/biography";
|
||||
|
||||
protected String AFFILIATIONS = ORCID_BIO + "/affiliation";
|
||||
|
||||
/**
|
||||
* Regex
|
||||
*/
|
||||
|
||||
protected String ORCID_NOT_FOUND = "ORCID [\\d-]* not found";
|
||||
|
||||
|
||||
@Override
|
||||
public List<Bio> convert(Document xml) {
|
||||
List<Bio> result = new ArrayList<Bio>();
|
||||
|
||||
if (XMLErrors.check(xml)) {
|
||||
|
||||
try {
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, ORCID_BIO);
|
||||
while (iterator.hasNext()) {
|
||||
Bio bio = convertBio(iterator.next());
|
||||
result.add(bio);
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in xpath syntax", e);
|
||||
}
|
||||
} else {
|
||||
processError(xml);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private Bio convertBio(Node node) {
|
||||
Bio bio = new Bio();
|
||||
|
||||
setOrcid(node,bio);
|
||||
setPersonalDetails(node, bio);
|
||||
setContactDetails(node, bio);
|
||||
setKeywords(node, bio);
|
||||
setExternalIdentifiers(node, bio);
|
||||
setResearcherUrls(node, bio);
|
||||
setBiography(node, bio);
|
||||
|
||||
return bio;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void processError(Document xml) {
|
||||
String errorMessage = XMLErrors.getErrorMessage(xml);
|
||||
|
||||
if(errorMessage.matches(ORCID_NOT_FOUND))
|
||||
{
|
||||
// do something?
|
||||
}
|
||||
|
||||
log.error("The orcid-message reports an error: " + errorMessage);
|
||||
}
|
||||
|
||||
|
||||
private void setOrcid(Node node, Bio bio) {
|
||||
public List<Person> convert(InputStream xml) {
|
||||
List<Person> bios= new ArrayList<>();
|
||||
try {
|
||||
String orcid = XMLUtils.getTextContent(node, ORCID);
|
||||
bio.setOrcid(orcid);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.debug("Error in finding the biography in bio xml.", e);
|
||||
}
|
||||
}
|
||||
Orcidv2 connector = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
|
||||
|
||||
protected void setBiography(Node xml, Bio bio) {
|
||||
try {
|
||||
String biography = XMLUtils.getTextContent(xml, BIOGRAPHY);
|
||||
bio.setBiography(biography);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the biography in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setResearcherUrls(Node xml, Bio bio) {
|
||||
try {
|
||||
NodeList researcher_urls = XMLUtils.getNodeList(xml, RESEARCHER_URL);
|
||||
if (researcher_urls != null) {
|
||||
for (int i = 0; i < researcher_urls.getLength(); i++) {
|
||||
Node researcher_url = researcher_urls.item(i);
|
||||
if (researcher_url.getNodeType() != Node.TEXT_NODE) {
|
||||
String url_name = XMLUtils.getTextContent(researcher_url, URL_NAME);
|
||||
String url = XMLUtils.getTextContent(researcher_url, URL);
|
||||
BioResearcherUrl researcherUrl = new BioResearcherUrl(url_name, url);
|
||||
bio.addResearcherUrl(researcherUrl);
|
||||
Search search = (Search) unmarshall(xml, Search.class);
|
||||
for(Result result : search.getResult()){
|
||||
OrcidId orcidIdentifier = result.getOrcidIdentifier();
|
||||
if(orcidIdentifier!=null){
|
||||
log.debug("Found OrcidId=" + orcidIdentifier.toString());
|
||||
String orcid = orcidIdentifier.getUriPath();
|
||||
Person bio = connector.getBio(orcid);
|
||||
if(bio!=null){
|
||||
bios.add(bio);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the researcher url in bio xml.", e);
|
||||
} catch (SAXException | URISyntaxException e) {
|
||||
log.error(e);
|
||||
}
|
||||
return bios;
|
||||
}
|
||||
|
||||
protected void setExternalIdentifiers(Node xml, Bio bio) {
|
||||
public Person convertSinglePerson(InputStream xml) {
|
||||
Person person = null;
|
||||
try {
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, EXTERNAL_IDENTIFIER);
|
||||
while (iterator.hasNext()) {
|
||||
Node external_identifier = iterator.next();
|
||||
String id_orcid = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_ORCID);
|
||||
String id_common_name = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_COMMNON_NAME);
|
||||
String id_reference = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_REFERENCE);
|
||||
String id_url = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_URL);
|
||||
BioExternalIdentifier externalIdentifier = new BioExternalIdentifier(id_orcid, id_common_name, id_reference, id_url);
|
||||
bio.addExternalIdentifier(externalIdentifier);
|
||||
}
|
||||
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the external identifier in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setKeywords(Node xml, Bio bio) {
|
||||
try {
|
||||
NodeList keywords = XMLUtils.getNodeList(xml, KEYWORD);
|
||||
if (keywords != null) {
|
||||
for (int i = 0; i < keywords.getLength(); i++) {
|
||||
String keyword = keywords.item(i).getTextContent();
|
||||
String[] split = keyword.split(",");
|
||||
for (String k : split) {
|
||||
bio.addKeyword(k.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the keywords in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setContactDetails(Node xml, Bio bio) {
|
||||
try {
|
||||
String country = XMLUtils.getTextContent(xml, COUNTRY);
|
||||
bio.setCountry(country);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the country in bio xml.", e);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setPersonalDetails(Node xml, Bio bio) {
|
||||
BioName name = bio.getName();
|
||||
|
||||
try {
|
||||
String givenNames = XMLUtils.getTextContent(xml, GIVEN_NAMES);
|
||||
name.setGivenNames(givenNames);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the given names in bio xml.", e);
|
||||
}
|
||||
|
||||
try {
|
||||
String familyName = XMLUtils.getTextContent(xml, FAMILY_NAME);
|
||||
name.setFamilyName(familyName);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the family name in bio xml.", e);
|
||||
}
|
||||
|
||||
try {
|
||||
String creditName = XMLUtils.getTextContent(xml, CREDIT_NAME);
|
||||
name.setCreditName(creditName);
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the credit name in bio xml.", e);
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, OTHER_NAME);
|
||||
while (iterator.hasNext()) {
|
||||
Node otherName = iterator.next();
|
||||
String textContent = otherName.getTextContent();
|
||||
name.getOtherNames().add(textContent.trim());
|
||||
}
|
||||
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in finding the other names in bio xml.", e);
|
||||
person = (Person) unmarshall(xml, Person.class);
|
||||
return person;
|
||||
} catch (SAXException | URISyntaxException e) {
|
||||
log.error(e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@@ -1,240 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.authority.orcid.xml;
|
||||
|
||||
import org.dspace.authority.orcid.model.*;
|
||||
import org.dspace.authority.util.*;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
import org.w3c.dom.NodeList;
|
||||
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
* @author Ben Bosman (ben at atmire dot com)
|
||||
* @author Mark Diggory (markd at atmire dot com)
|
||||
*/
|
||||
public class XMLtoWork extends Converter {
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = Logger.getLogger(XMLtoWork.class);
|
||||
|
||||
/**
|
||||
* orcid-message XPATHs
|
||||
*/
|
||||
|
||||
protected String ORCID_WORKS = "//orcid-works";
|
||||
protected String ORCID_WORK = ORCID_WORKS + "/orcid-work";
|
||||
|
||||
protected String WORK_TITLE = "work-title";
|
||||
protected String TITLE = WORK_TITLE + "/title";
|
||||
protected String SUBTITLE = WORK_TITLE + "/subtitle";
|
||||
protected String TRANSLATED_TITLES = WORK_TITLE + "/translated-title";
|
||||
protected String TRANSLATED_TITLES_LANGUAGE = "@language-code";
|
||||
|
||||
protected String SHORT_DESCRIPTION = "short-description";
|
||||
|
||||
protected String WORK_CITATION = "work-citation";
|
||||
protected String CITATION_TYPE = WORK_CITATION + "/work-citation-type";
|
||||
protected String CITATION = WORK_CITATION + "/citation";
|
||||
|
||||
protected String WORK_TYPE = "work-type";
|
||||
|
||||
protected String PUBLICATION_DATE = "publication-date";
|
||||
protected String YEAR = PUBLICATION_DATE + "/year";
|
||||
protected String MONTH = PUBLICATION_DATE + "/month";
|
||||
protected String DAY = PUBLICATION_DATE + "/day";
|
||||
|
||||
protected String WORK_EXTERNAL_IDENTIFIERS = "work-external-identifiers";
|
||||
protected String WORK_EXTERNAL_IDENTIFIER = WORK_EXTERNAL_IDENTIFIERS + "/work-external-identifier";
|
||||
protected String WORK_EXTERNAL_IDENTIFIER_TYPE = "work-external-identifier-type";
|
||||
protected String WORK_EXTERNAL_IDENTIFIER_ID = "work-external-identifier-id";
|
||||
|
||||
protected String URL = "url";
|
||||
|
||||
protected String WORK_CONTRIBUTOR = "work-contributors";
|
||||
protected String CONTRIBUTOR = WORK_CONTRIBUTOR+"/contributor";
|
||||
protected String CONTRIBUTOR_ORCID = "contributor-orcid";
|
||||
protected String CREDIT_NAME = "credit-name";
|
||||
protected String CONTRIBUTOR_EMAIL = "contributor-email";
|
||||
protected String CONTRIBUTOR_ATTRIBUTES = "contributor-attributes";
|
||||
protected String CONTRIBUTOR_SEQUENCE = "contributor-sequence";
|
||||
protected String CONTRIBUTOR_ROLE = "contributor-role";
|
||||
|
||||
protected String WORK_SOURCE = "work-source";
|
||||
|
||||
|
||||
@Override
|
||||
public List<Work> convert(Document document) {
|
||||
List<Work> result = new ArrayList<Work>();
|
||||
|
||||
if (XMLErrors.check(document)) {
|
||||
|
||||
try {
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(document, ORCID_WORK);
|
||||
while (iterator.hasNext()) {
|
||||
Work work = convertWork(iterator.next());
|
||||
result.add(work);
|
||||
}
|
||||
} catch (XPathExpressionException e) {
|
||||
log.error("Error in xpath syntax", e);
|
||||
}
|
||||
} else {
|
||||
processError(document);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
protected Work convertWork(Node node) throws XPathExpressionException {
|
||||
Work work = new Work();
|
||||
setTitle(node, work);
|
||||
setDescription(node, work);
|
||||
setCitation(node, work);
|
||||
setWorkType(node, work);
|
||||
setPublicationDate(node, work);
|
||||
setExternalIdentifiers(node, work);
|
||||
setUrl(node, work);
|
||||
setContributors(node, work);
|
||||
setWorkSource(node, work);
|
||||
|
||||
return work;
|
||||
}
|
||||
|
||||
protected void setWorkSource(Node node, Work work) throws XPathExpressionException {
|
||||
String workSource = XMLUtils.getTextContent(node, WORK_SOURCE);
|
||||
work.setWorkSource(workSource);
|
||||
}
|
||||
|
||||
protected void setContributors(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
Set<Contributor> contributors = new HashSet<Contributor>();
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, CONTRIBUTOR);
|
||||
while (iterator.hasNext()) {
|
||||
Node nextContributorNode = iterator.next();
|
||||
String orcid = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_ORCID);
|
||||
String creditName = XMLUtils.getTextContent(nextContributorNode, CREDIT_NAME);
|
||||
String email = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_EMAIL);
|
||||
|
||||
Set<ContributorAttribute> contributorAttributes = new HashSet<ContributorAttribute>();
|
||||
NodeList attributeNodes = XMLUtils.getNodeList(nextContributorNode, CONTRIBUTOR_ATTRIBUTES);
|
||||
Iterator<Node> attributesIterator = XMLUtils.getNodeListIterator(attributeNodes);
|
||||
while (attributesIterator.hasNext()) {
|
||||
Node nextAttribute = attributesIterator.next();
|
||||
|
||||
String roleText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_ROLE);
|
||||
ContributorAttributeRole role = EnumUtils.lookup(ContributorAttributeRole.class, roleText);
|
||||
|
||||
String sequenceText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_SEQUENCE);
|
||||
ContributorAttributeSequence sequence = EnumUtils.lookup(ContributorAttributeSequence.class, sequenceText);
|
||||
|
||||
ContributorAttribute attribute = new ContributorAttribute(role, sequence);
|
||||
contributorAttributes.add(attribute);
|
||||
}
|
||||
|
||||
Contributor contributor = new Contributor(orcid, creditName, email, contributorAttributes);
|
||||
contributors.add(contributor);
|
||||
}
|
||||
|
||||
work.setContributors(contributors);
|
||||
}
|
||||
|
||||
protected void setUrl(Node node, Work work) throws XPathExpressionException {
|
||||
String url = XMLUtils.getTextContent(node, URL);
|
||||
work.setUrl(url);
|
||||
}
|
||||
|
||||
protected void setExternalIdentifiers(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, WORK_EXTERNAL_IDENTIFIER);
|
||||
while (iterator.hasNext()) {
|
||||
Node work_external_identifier = iterator.next();
|
||||
String typeText = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_TYPE);
|
||||
|
||||
WorkExternalIdentifierType type = EnumUtils.lookup(WorkExternalIdentifierType.class, typeText);
|
||||
|
||||
String id = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_ID);
|
||||
|
||||
WorkExternalIdentifier externalID = new WorkExternalIdentifier(type, id);
|
||||
work.setWorkExternalIdentifier(externalID);
|
||||
}
|
||||
}
|
||||
|
||||
protected void setPublicationDate(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String year = XMLUtils.getTextContent(node, YEAR);
|
||||
String month = XMLUtils.getTextContent(node, MONTH);
|
||||
String day = XMLUtils.getTextContent(node, DAY);
|
||||
|
||||
String publicationDate = year;
|
||||
if (StringUtils.isNotBlank(month)) {
|
||||
publicationDate += "-" + month;
|
||||
if (StringUtils.isNotBlank(day)) {
|
||||
publicationDate += "-" + day;
|
||||
}
|
||||
}
|
||||
|
||||
work.setPublicationDate(publicationDate);
|
||||
}
|
||||
|
||||
protected void setWorkType(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String workTypeText = XMLUtils.getTextContent(node, WORK_TYPE);
|
||||
WorkType workType = EnumUtils.lookup(WorkType.class, workTypeText);
|
||||
|
||||
work.setWorkType(workType);
|
||||
}
|
||||
|
||||
protected void setCitation(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String typeText = XMLUtils.getTextContent(node, CITATION_TYPE);
|
||||
CitationType type = EnumUtils.lookup(CitationType.class, typeText);
|
||||
|
||||
String citationtext = XMLUtils.getTextContent(node, CITATION);
|
||||
|
||||
Citation citation = new Citation(type, citationtext);
|
||||
work.setCitation(citation);
|
||||
}
|
||||
|
||||
protected void setDescription(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String description = null;
|
||||
description = XMLUtils.getTextContent(node, SHORT_DESCRIPTION);
|
||||
work.setDescription(description);
|
||||
}
|
||||
|
||||
protected void setTitle(Node node, Work work) throws XPathExpressionException {
|
||||
|
||||
String title = XMLUtils.getTextContent(node, TITLE);
|
||||
|
||||
String subtitle = XMLUtils.getTextContent(node, SUBTITLE);
|
||||
|
||||
Map<String, String> translatedTitles = new HashMap<String, String>();
|
||||
NodeList nodeList = XMLUtils.getNodeList(node, TRANSLATED_TITLES);
|
||||
Iterator<Node> iterator = XMLUtils.getNodeListIterator(nodeList);
|
||||
while (iterator.hasNext()) {
|
||||
Node languageNode = iterator.next();
|
||||
String language = XMLUtils.getTextContent(languageNode, TRANSLATED_TITLES_LANGUAGE);
|
||||
String translated_title = XMLUtils.getTextContent(languageNode, ".");
|
||||
translatedTitles.put(language, translated_title);
|
||||
}
|
||||
|
||||
WorkTitle workTitle = new WorkTitle(title, subtitle, translatedTitles);
|
||||
work.setWorkTitle(workTitle);
|
||||
}
|
||||
|
||||
}
|
@@ -7,13 +7,12 @@
 */
package org.dspace.authority.rest;

import org.apache.http.impl.client.HttpClientBuilder;
import org.dspace.authority.util.XMLUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;

import java.io.InputStream;
import java.util.Scanner;
@@ -38,26 +37,27 @@ public class RESTConnector {
        this.url = url;
    }

    public Document get(String path) {
        Document document = null;

    public InputStream get(String path, String accessToken) {
        InputStream result = null;
        path = trimSlashes(path);

        String fullPath = url + '/' + path;
        HttpGet httpGet = new HttpGet(fullPath);
        if(StringUtils.isNotBlank(accessToken)){
            httpGet.addHeader("Content-Type", "application/vnd.orcid+xml");
            httpGet.addHeader("Authorization","Bearer "+accessToken);
        }
        try {
            HttpClient httpClient = HttpClientBuilder.create().build();
            HttpResponse getResponse = httpClient.execute(httpGet);
            //do not close this httpClient
            result = getResponse.getEntity().getContent();
            document = XMLUtils.convertStreamToXML(result);

        } catch (Exception e) {
            getGotError(e, fullPath);
        }

        return document;
        return result;
    }

    protected void getGotError(Exception e, String fullPath) {
@@ -7,9 +7,7 @@
 */
package org.dspace.authority.rest;

import org.dspace.authority.AuthorityValue;

import java.util.List;
import org.dspace.authority.SolrAuthorityInterface;

/**
 *
@@ -18,7 +16,7 @@ import java.util.List;
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public abstract class RestSource {
public abstract class RestSource implements SolrAuthorityInterface {

    protected RESTConnector restConnector;

@@ -26,7 +24,4 @@ public abstract class RestSource {
        this.restConnector = new RESTConnector(url);
    }

    public abstract List<AuthorityValue> queryAuthorities(String text, int max);

    public abstract AuthorityValue queryAuthorityID(String id);
}
@@ -244,6 +244,12 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
return true;
|
||||
}
|
||||
|
||||
// If authorization was given before and cached
|
||||
Boolean cachedResult = c.getCachedAuthorizationResult(o, action, e);
|
||||
if (cachedResult != null) {
|
||||
return cachedResult.booleanValue();
|
||||
}
|
||||
|
||||
// is eperson set? if not, userToCheck = null (anonymous)
|
||||
EPerson userToCheck = null;
|
||||
if (e != null)
|
||||
@@ -254,8 +260,9 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
// if user is an Admin on this object
|
||||
DSpaceObject adminObject = useInheritance ? serviceFactory.getDSpaceObjectService(o).getAdminObject(c, o, action) : null;
|
||||
|
||||
if (isAdmin(c, adminObject))
|
||||
if (isAdmin(c, e, adminObject))
|
||||
{
|
||||
c.cacheAuthorizedAction(o, action, e, true, null);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -297,6 +304,11 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
if (ignoreCustomPolicies
|
||||
&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
|
||||
{
|
||||
if(c.isReadOnly()) {
|
||||
//When we are in read-only mode, we will cache authorized actions in a different way
|
||||
//So we remove this resource policy from the cache.
|
||||
c.uncacheEntity(rp);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -305,20 +317,29 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
{
|
||||
if (rp.getEPerson() != null && rp.getEPerson().equals(userToCheck))
|
||||
{
|
||||
c.cacheAuthorizedAction(o, action, e, true, rp);
|
||||
return true; // match
|
||||
}
|
||||
|
||||
if ((rp.getGroup() != null)
|
||||
&& (groupService.isMember(c, rp.getGroup())))
|
||||
&& (groupService.isMember(c, e, rp.getGroup())))
|
||||
{
|
||||
// group was set, and eperson is a member
|
||||
// of that group
|
||||
c.cacheAuthorizedAction(o, action, e, true, rp);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if(c.isReadOnly()) {
|
||||
//When we are in read-only mode, we will cache authorized actions in a different way
|
||||
//So we remove this resource policy from the cache.
|
||||
c.uncacheEntity(rp);
|
||||
}
|
||||
}
|
||||
|
||||
// default authorization is denial
|
||||
c.cacheAuthorizedAction(o, action, e, false, null);
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -349,9 +370,14 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
@Override
|
||||
public boolean isAdmin(Context c, DSpaceObject o) throws SQLException
|
||||
{
|
||||
return this.isAdmin(c, c.getCurrentUser(), o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException
|
||||
{
|
||||
// return true if user is an Administrator
|
||||
if (isAdmin(c))
|
||||
if (isAdmin(c, e))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
@@ -361,6 +387,11 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
return false;
|
||||
}
|
||||
|
||||
Boolean cachedResult = c.getCachedAuthorizationResult(o, Constants.ADMIN, e);
|
||||
if (cachedResult != null) {
|
||||
return cachedResult.booleanValue();
|
||||
}
|
||||
|
||||
//
|
||||
// First, check all Resource Policies directly on this object
|
||||
//
|
||||
@@ -371,19 +402,27 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
// check policies for date validity
|
||||
if (resourcePolicyService.isDateValid(rp))
|
||||
{
|
||||
if (rp.getEPerson() != null && rp.getEPerson().equals(c.getCurrentUser()))
|
||||
if (rp.getEPerson() != null && rp.getEPerson().equals(e))
|
||||
{
|
||||
c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp);
|
||||
return true; // match
|
||||
}
|
||||
|
||||
if ((rp.getGroup() != null)
|
||||
&& (groupService.isMember(c, rp.getGroup())))
|
||||
&& (groupService.isMember(c, e, rp.getGroup())))
|
||||
{
|
||||
// group was set, and eperson is a member
|
||||
// of that group
|
||||
c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if(c.isReadOnly()) {
|
||||
//When we are in read-only mode, we will cache authorized actions in a different way
|
||||
//So we remove this resource policy from the cache.
|
||||
c.uncacheEntity(rp);
|
||||
}
|
||||
}
|
||||
|
||||
// If user doesn't have specific Admin permissions on this object,
|
||||
@@ -393,9 +432,12 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
DSpaceObject parent = serviceFactory.getDSpaceObjectService(o).getParentObject(c, o);
|
||||
if (parent != null)
|
||||
{
|
||||
return isAdmin(c, parent);
|
||||
boolean admin = isAdmin(c, e, parent);
|
||||
c.cacheAuthorizedAction(o, Constants.ADMIN, e, admin, null);
|
||||
return admin;
|
||||
}
|
||||
|
||||
c.cacheAuthorizedAction(o, Constants.ADMIN, e, false, null);
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -418,7 +460,23 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
return groupService.isMember(c, Group.ADMIN);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAdmin(Context c, EPerson e) throws SQLException
|
||||
{
|
||||
// if we're ignoring authorization, user is member of admin
|
||||
if (c.ignoreAuthorization())
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (e == null)
|
||||
{
|
||||
return false; // anonymous users can't be admins....
|
||||
} else
|
||||
{
|
||||
return groupService.isMember(c, e, Group.ADMIN);
|
||||
}
|
||||
}
|
||||
public boolean isCommunityAdmin(Context c) throws SQLException
|
||||
{
|
||||
EPerson e = c.getCurrentUser();
|
||||
@@ -624,7 +682,7 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
|
||||
List<Group> groups = new ArrayList<Group>();
|
||||
for (ResourcePolicy resourcePolicy : policies) {
|
||||
if(resourcePolicy.getGroup() != null)
|
||||
if(resourcePolicy.getGroup() != null && resourcePolicyService.isDateValid(resourcePolicy))
|
||||
{
|
||||
groups.add(resourcePolicy.getGroup());
|
||||
}
|
||||
@@ -642,13 +700,14 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
@Override
|
||||
public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException
|
||||
{
|
||||
return findByTypeIdGroupAction(c, dso, group, action, policyID) != null;
|
||||
return !resourcePolicyService.findByTypeGroupActionExceptId(c, dso, group, action, policyID).isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ResourcePolicy findByTypeIdGroupAction(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException
|
||||
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action)
|
||||
throws SQLException
|
||||
{
|
||||
List<ResourcePolicy> policies = resourcePolicyService.find(c, dso, group, action, policyID);
|
||||
List<ResourcePolicy> policies = resourcePolicyService.find(c, dso, group, action);
|
||||
|
||||
if (CollectionUtils.isNotEmpty(policies))
|
||||
{
|
||||
@@ -658,7 +717,6 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically at the groups that
|
||||
* have right on the collection. E.g., if the anonymous can access the collection policies are assigned to anonymous.
|
||||
@@ -734,12 +792,19 @@ public class AuthorizeServiceImpl implements AuthorizeService
|
||||
public ResourcePolicy createOrModifyPolicy(ResourcePolicy policy, Context context, String name, Group group, EPerson ePerson,
|
||||
Date embargoDate, int action, String reason, DSpaceObject dso) throws AuthorizeException, SQLException
|
||||
{
|
||||
ResourcePolicy policyTemp = null;
|
||||
if (policy != null)
|
||||
{
|
||||
List<ResourcePolicy> duplicates = resourcePolicyService.findByTypeGroupActionExceptId(context, dso, group, action, policy.getID());
|
||||
if (!duplicates.isEmpty())
|
||||
{
|
||||
policy = duplicates.get(0);
|
||||
}
|
||||
} else {
|
||||
// if an identical policy (same Action and same Group) is already in place modify it...
|
||||
policyTemp = findByTypeGroupAction(context, dso, group, action);
|
||||
}
|
||||
|
||||
int policyID = -1;
|
||||
if (policy != null) policyID = policy.getID();
|
||||
|
||||
// if an identical policy (same Action and same Group) is already in place modify it...
|
||||
ResourcePolicy policyTemp = findByTypeIdGroupAction(context, dso, group, action, policyID);
|
||||
if (policyTemp != null)
|
||||
{
|
||||
policy = policyTemp;
|
||||
|
@@ -218,9 +218,9 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
    }

    /**
     * gets ID for Group referred to by this policy
     * gets the Group referred to by this policy
     *
     * @return groupID, or null if no group set
     * @return group, or null if no group set
     */
    public Group getGroup()
    {
@@ -228,7 +228,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
    }

    /**
     * sets ID for Group referred to by this policy
     * sets the Group referred to by this policy
     * @param epersonGroup Group
     */
    public void setGroup(Group epersonGroup)
@@ -104,13 +104,22 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService
    }

    @Override
    public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
        return resourcePolicyDAO.findByTypeIdGroupAction(c, dso, group, action, notPolicyID);
    public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action) throws SQLException {
        return resourcePolicyDAO.findByTypeGroupAction(c, dso, group, action);
    }

    @Override
    public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id) throws SQLException{
        return resourcePolicyDAO.findByEPersonGroupTypeIdAction(c, e, groups, action, type_id);
    }

    @Override
    public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID)
            throws SQLException
    {
        return resourcePolicyDAO.findByTypeGroupActionExceptId(context, dso, group, action, notPolicyID);
    }

    /**
     * Delete an ResourcePolicy
@@ -34,7 +34,16 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
    public List<ResourcePolicy> findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException;

    public List<ResourcePolicy> findByTypeIdGroupAction(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
    public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException;

    /**
     * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
     * This method can be used to detect duplicate ResourcePolicies.
     * @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
     * @return List of resource policies for the same DSpaceObject, group and action but other policyID.
     * @throws SQLException
     */
    public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;

    public List<ResourcePolicy> findByEPersonGroupTypeIdAction(Context context, EPerson e, List<Group> groups, int action, int type_id) throws SQLException;
@@ -75,7 +75,7 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ResourcePolicy> findByTypeIdGroupAction(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
|
||||
public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException {
|
||||
Criteria criteria = createCriteria(context, ResourcePolicy.class);
|
||||
criteria.add(Restrictions.and(
|
||||
Restrictions.eq("dSpaceObject", dso),
|
||||
@@ -83,15 +83,21 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
|
||||
Restrictions.eq("actionId", action)
|
||||
));
|
||||
criteria.setMaxResults(1);
|
||||
|
||||
List<ResourcePolicy> results;
|
||||
if (notPolicyID != -1)
|
||||
{
|
||||
criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", notPolicyID))));
|
||||
}
|
||||
|
||||
return list(criteria);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
|
||||
Criteria criteria = createCriteria(context, ResourcePolicy.class);
|
||||
criteria.add(Restrictions.and(
|
||||
Restrictions.eq("dSpaceObject", dso),
|
||||
Restrictions.eq("epersonGroup", group),
|
||||
Restrictions.eq("actionId", action)
|
||||
));
|
||||
criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", notPolicyID))));
|
||||
return list(criteria);
|
||||
}
|
||||
|
||||
public List<ResourcePolicy> findByEPersonGroupTypeIdAction(Context context, EPerson e, List<Group> groups, int action, int type_id) throws SQLException
|
||||
{
|
||||
Criteria criteria = createCriteria(context, ResourcePolicy.class);
|
||||
|
@@ -167,11 +167,28 @@ public interface AuthorizeService {
|
||||
*/
|
||||
public boolean isAdmin(Context c, DSpaceObject o) throws SQLException;
|
||||
|
||||
/**
|
||||
* Check to see if a specific user is an Administrator of a given object
|
||||
* within DSpace. Always return {@code true} if the user is a System
|
||||
* Admin
|
||||
*
|
||||
* @param c current context
|
||||
* @param e the user to check
|
||||
* @param o current DSpace Object, if <code>null</code> the call will be
|
||||
* equivalent to a call to the <code>isAdmin(Context c)</code>
|
||||
* method
|
||||
* @return {@code true} if the user has administrative privileges on the
|
||||
* given DSpace object
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException;
|
||||
|
||||
|
||||
/**
|
||||
* Check to see if the current user is a System Admin. Always return
|
||||
* {@code true} if c.ignoreAuthorization is set. Anonymous users
|
||||
* can't be Admins (EPerson set to NULL)
|
||||
* {@code true} if c.ignoreAuthorization is set. If no EPerson is
|
||||
* logged in and context.getCurrentUser() returns null, this method
|
||||
* returns false as anonymous users can never be administrators.
|
||||
*
|
||||
* @param c current context
|
||||
* @return {@code true} if user is an admin or ignore authorization
|
||||
@@ -179,6 +196,17 @@ public interface AuthorizeService {
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public boolean isAdmin(Context c) throws SQLException;
|
||||
|
||||
/**
|
||||
* Check to see if a specific user is system admin. Always return
|
||||
* {@code true} if c.ignoreAuthorization is set.
|
||||
*
|
||||
* @param c current context
|
||||
* @return {@code true} if user is an admin or ignore authorization
|
||||
* flag set
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public boolean isAdmin(Context c, EPerson e) throws SQLException;
|
||||
|
||||
public boolean isCommunityAdmin(Context c) throws SQLException;
|
||||
|
||||
@@ -410,8 +438,8 @@ public interface AuthorizeService {
|
||||
* @throws SQLException if there's a database problem
|
||||
*/
|
||||
public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject o, Group group, int actionID, int policyID) throws SQLException;
|
||||
|
||||
public ResourcePolicy findByTypeIdGroupAction(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException;
|
||||
|
||||
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException;
|
||||
|
||||
|
||||
/**
|
||||
|
@@ -33,11 +33,21 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
    public List<ResourcePolicy> find(Context c, DSpaceObject o, int actionId) throws SQLException;

    public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
    public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action) throws SQLException;

    public List<ResourcePolicy> find(Context context, Group group) throws SQLException;

    public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id) throws SQLException;

    /**
     * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
     * This method can be used to detect duplicate ResourcePolicies.
     * @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
     * @return List of resource policies for the same DSpaceObject, group and action but other policyID.
     * @throws SQLException
     */
    public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID)
            throws SQLException;

    public String getActionText(ResourcePolicy resourcePolicy);
@@ -393,4 +393,8 @@ public interface BrowseDAO
    public boolean isEnableBrowseFrequencies();

    public void setEnableBrowseFrequencies(boolean enableBrowseFrequencies);

    public void setStartsWith(String startsWith);

    public String getStartsWith();
}
@@ -7,18 +7,19 @@
*/
package org.dspace.browse;

import java.sql.SQLException;
import java.util.List;
import java.util.ArrayList;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.sort.SortOption;
import org.dspace.sort.OrderFormat;
import org.dspace.sort.SortOption;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
* This class does most of the actual grunt work of preparing a browse

@@ -408,7 +409,7 @@ public class BrowseEngine
// get the table name that we are going to be getting our data from
// this is the distinct table constrained to either community or collection
dao.setTable(browseIndex.getDistinctTableName());

dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
// remind the DAO that this is a distinct value browse, so it knows what sort
// of query to build
dao.setDistinct(true);

@@ -463,15 +464,8 @@ public class BrowseEngine
String rawFocusValue = null;
if (offset < 1 && scope.hasJumpToValue() || scope.hasStartsWith())
{
String focusValue = getJumpToValue();

// store the value to tell the Browse Info object which value we are browsing on
rawFocusValue = focusValue;

// make sure the incoming value is normalised
focusValue = normalizeJumpToValue(focusValue);

offset = getOffsetForDistinctValue(focusValue);
rawFocusValue = getJumpToValue();
}
@@ -7,10 +7,7 @@
*/
package org.dspace.browse;

import java.io.Serializable;
import java.sql.SQLException;
import java.util.*;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;

@@ -18,17 +15,17 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverFacetField;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.*;
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.DiscoverResult.FacetResult;
import org.dspace.discovery.DiscoverResult.SearchDocument;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.services.factory.DSpaceServicesFactory;

import java.io.Serializable;
import java.sql.SQLException;
import java.util.*;

/**
*
* @author Andrea Bollini (CILEA)

@@ -85,6 +82,8 @@ public class SolrBrowseDAO implements BrowseDAO
/** value to start browse from in focus field */
private String focusValue = null;

private String startsWith = null;

/** field to look for value in */
private String valueField = null;

@@ -152,9 +151,16 @@ public class SolrBrowseDAO implements BrowseDAO
addStatusFilter(query);
if (distinct)
{
DiscoverFacetField dff = new DiscoverFacetField(facetField,
DiscoverFacetField dff;
if (StringUtils.isNotBlank(startsWith)) {
dff = new DiscoverFacetField(facetField,
DiscoveryConfigurationParameters.TYPE_TEXT, -1,
DiscoveryConfigurationParameters.SORT.VALUE, startsWith);
} else {
dff = new DiscoverFacetField(facetField,
DiscoveryConfigurationParameters.TYPE_TEXT, -1,
DiscoveryConfigurationParameters.SORT.VALUE);
}
query.addFacetField(dff);
query.setFacetMinCount(1);
query.setMaxResults(0);

@@ -486,6 +492,16 @@ public class SolrBrowseDAO implements BrowseDAO
return focusValue;
}

@Override
public void setStartsWith(String startsWith) {
this.startsWith = startsWith;
}

@Override
public String getStartsWith() {
return startsWith;
}

/*
* (non-Javadoc)
*
@@ -7,11 +7,6 @@
*/
package org.dspace.checker;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;

import org.apache.commons.collections.MapUtils;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;

@@ -23,6 +18,11 @@ import org.dspace.core.Context;
import org.dspace.storage.bitstore.factory.StorageServiceFactory;
import org.dspace.storage.bitstore.service.BitstreamStorageService;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;

/**
* <p>
* Main class for the checksum checker tool, which calculates checksums for each

@@ -127,6 +127,7 @@ public final class CheckerCommand
collector.collect(context, info);
}

context.uncacheEntity(bitstream);
bitstream = dispatcher.next();
}
}
@@ -55,7 +55,7 @@ public class ChecksumHistory implements ReloadableEntity<Long>
private String checksumCalculated;

@ManyToOne
@JoinColumn(name = "result")
@JoinColumn(name = "result", referencedColumnName = "result_code")
private ChecksumResult checksumResult;
@@ -7,20 +7,7 @@
*/
package org.dspace.checker;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;
import javax.mail.MessagingException;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;
import org.dspace.checker.service.SimpleReporterService;

@@ -28,6 +15,14 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Email;

import javax.mail.MessagingException;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;

/**
* <p>
* The email reporter creates and sends emails to an administrator. This only

@@ -184,7 +179,7 @@ public class DailyReportEmailer

try
{
context = new Context();
context = new Context(Context.Mode.READ_ONLY);

// the number of bitstreams in report
int numBitstreams = 0;
@@ -7,6 +7,8 @@
*/
package org.dspace.checker;

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.dspace.content.Bitstream;

import javax.persistence.*;

@@ -57,7 +59,7 @@ public class MostRecentChecksum implements Serializable
private boolean bitstreamFound;

@OneToOne
@JoinColumn(name= "result")
@JoinColumn(name= "result", referencedColumnName = "result_code")
private ChecksumResult checksumResult;

/**

@@ -155,4 +157,44 @@ public class MostRecentChecksum implements Serializable
public void setBitstreamFound(boolean bitstreamFound) {
this.bitstreamFound = bitstreamFound;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;

if (o == null || getClass() != o.getClass()) return false;

MostRecentChecksum that = (MostRecentChecksum) o;

return new EqualsBuilder()
.append(toBeProcessed, that.toBeProcessed)
.append(matchedPrevChecksum, that.matchedPrevChecksum)
.append(infoFound, that.infoFound)
.append(bitstreamFound, that.bitstreamFound)
.append(bitstream, that.bitstream)
.append(expectedChecksum, that.expectedChecksum)
.append(currentChecksum, that.currentChecksum)
.append(processStartDate, that.processStartDate)
.append(processEndDate, that.processEndDate)
.append(checksumAlgorithm, that.checksumAlgorithm)
.append(checksumResult, that.checksumResult)
.isEquals();
}

@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(bitstream)
.append(toBeProcessed)
.append(expectedChecksum)
.append(currentChecksum)
.append(processStartDate)
.append(processEndDate)
.append(checksumAlgorithm)
.append(matchedPrevChecksum)
.append(infoFound)
.append(bitstreamFound)
.append(checksumResult)
.toHashCode();
}
}
@@ -145,17 +145,14 @@ public final class ResultsPruner
throw new IllegalStateException("Problem parsing duration: "
+ e.getMessage(), e);
}
ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
if(code == null)
{
throw new IllegalStateException("Checksum result code not found: " + resultCode);
}
if ("default".equals(resultCode))
{
if ("default".equals(resultCode)) {
rp.setDefaultDuration(duration);
}
else
{
} else {
ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
if (code == null) {
throw new IllegalStateException("Checksum result code not found: " + resultCode);
}

rp.addInterested(code, duration);
}
}
@@ -166,7 +166,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
*
* @return the MIME type
*/
public final String getMIMEType()
public String getMIMEType()
{
return mimetype;
}

@@ -177,7 +177,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
* @param s
* the new MIME type
*/
public final void setMIMEType(String s)
public void setMIMEType(String s)
{
this.mimetype = s;
}
@@ -95,6 +95,37 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
return bitstreamDAO.findAll(context, Bitstream.class);
}

@Override
public Bitstream clone(Context context, Bitstream bitstream)
throws SQLException
{
// Create a new bitstream with a new ID.
Bitstream clonedBitstream = bitstreamDAO.create(context, new Bitstream());
// Set the internal identifier, file size, checksum, and
// checksum algorithm as same as the given bitstream.
clonedBitstream.setInternalId(bitstream.getInternalId());
clonedBitstream.setSizeBytes(bitstream.getSize());
clonedBitstream.setChecksum(bitstream.getChecksum());
clonedBitstream.setChecksumAlgorithm(bitstream.getChecksumAlgorithm());

try
{
//Update our bitstream but turn off the authorization system since permissions haven't been set at this point in time.
context.turnOffAuthorisationSystem();
update(context, clonedBitstream);
}
catch (AuthorizeException e)
{
log.error(e);
//Can never happen since we turn off authorization before we update
}
finally
{
context.restoreAuthSystemState();
}
return clonedBitstream;
}

@Override
public Bitstream create(Context context, InputStream is) throws IOException, SQLException {
// Store the bits

@@ -248,21 +279,21 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
context.addEvent(new Event(Event.DELETE, Constants.BITSTREAM, bitstream.getID(),
String.valueOf(bitstream.getSequenceID()), getIdentifiers(context, bitstream)));

//Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
bundle.getBitstreams().remove(bitstream);
}

// Remove policies
authorizeService.removeAllPolicies(context, bitstream);

// Remove bitstream itself
bitstream.setDeleted(true);
update(context, bitstream);

//Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
bundle.removeBitstream(bitstream);
}

//Remove all bundles from the bitstream object, clearing the connection in 2 ways
bundles.clear();

// Remove policies only after the bitstream has been updated (otherwise the current user has not WRITE rights)
authorizeService.removeAllPolicies(context, bitstream);
}

@Override
@@ -10,6 +10,7 @@ package org.dspace.content;
import java.sql.SQLException;
import java.util.*;

import org.apache.commons.collections.CollectionUtils;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BundleService;
import org.dspace.core.Constants;

@@ -130,18 +131,40 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport
}

/**
* Get the bitstreams in this bundle
* Get a copy of the bitstream list of this bundle
* Note that this is a copy and if you wish to manipulate the bistream list, you should use
* {@ref Bundle.addBitstream}, {@ref Bundle.removeBitstream} or {@ref Bundle.clearBitstreams}
*
* @return the bitstreams
*/
public List<Bitstream> getBitstreams() {
return bitstreams;
List<Bitstream> bitstreamList = new LinkedList<>(this.bitstreams);
return bitstreamList;
}

/**
* Add a new bitstream to this bundle.
* @param bitstream
*/
void addBitstream(Bitstream bitstream){
bitstreams.add(bitstream);
}

/**
* Clear the list of bitstream of this bundle
*/
public void clearBitstreams() {
bitstreams.clear();
}

/**
* Remove the given bitstream from this bundles bitstream list
* @param bitstream The bitstream to remove
*/
public void removeBitstream(Bitstream bitstream) {
bitstreams.remove(bitstream);
}

/**
* Get the items this bundle appears in
*

@@ -215,5 +238,4 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport
}
return bundleService;
}

}
@@ -147,6 +147,14 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
return;
}
}

//Ensure that the last modified from the item is triggered !
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);
}

bundle.addBitstream(bitstream);
bitstream.getBundles().add(bundle);

@@ -191,14 +199,18 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
bundle.unsetPrimaryBitstreamID();
}

// Check if we our bitstream is part of a single bundle:
// If so delete it, if not then remove the link between bundle & bitstream
if(bitstream.getBundles().size() == 1)
// Check if our bitstream is part of a single or no bundle.
// Bitstream.getBundles() may be empty (the delete() method clears
// the bundles). We should not delete the bitstream, if it is used
// in another bundle, instead we just remove the link between bitstream
// and this bundle.
if(bitstream.getBundles().size() <= 1)
{
// We don't need to remove the link between bundle & bitstream, this will be handled in the delete() method.
// We don't need to remove the link between bundle & bitstream,
// this will be handled in the delete() method.
bitstreamService.delete(context, bitstream);
}else{
bundle.getBitstreams().remove(bitstream);
bundle.removeBitstream(bitstream);
bitstream.getBundles().remove(bundle);
}
}

@@ -269,29 +281,60 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws AuthorizeException, SQLException {
authorizeService.authorizeAction(context, bundle, Constants.WRITE);

bundle.getBitstreams().clear();
List<Bitstream> currentBitstreams = bundle.getBitstreams();
List<Bitstream> updatedBitstreams = new ArrayList<Bitstream>();

// Loop through and ensure these Bitstream IDs are all valid. Add them to list of updatedBitstreams.
for (int i = 0; i < bitstreamIds.length; i++) {
UUID bitstreamId = bitstreamIds[i];
Bitstream bitstream = bitstreamService.find(context, bitstreamId);
if(bitstream == null){

// If we have an invalid Bitstream ID, just ignore it, but log a warning
if(bitstream == null) {
//This should never occur but just in case
log.warn(LogManager.getHeader(context, "Invalid bitstream id while changing bitstream order", "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
continue;
}
bitstream.getBundles().remove(bundle);
bundle.getBitstreams().add(bitstream);
bitstream.getBundles().add(bundle);

bitstreamService.update(context, bitstream);
// If we have a Bitstream not in the current list, log a warning & exit immediately
if(!currentBitstreams.contains(bitstream))
{
log.warn(LogManager.getHeader(context, "Encountered a bitstream not in this bundle while changing bitstream order. Bitstream order will not be changed.", "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
return;
}
updatedBitstreams.add(bitstream);
}

//The order of the bitstreams has changed, ensure that we update the last modified of our item
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
// If our lists are different sizes, exit immediately
if(updatedBitstreams.size()!=currentBitstreams.size())
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);
log.warn(LogManager.getHeader(context, "Size of old list and new list do not match. Bitstream order will not be changed.", "Bundle: " + bundle.getID()));
return;
}

// As long as the order has changed, update it
if(CollectionUtils.isNotEmpty(updatedBitstreams) && !updatedBitstreams.equals(currentBitstreams))
{
//First clear out the existing list of bitstreams
bundle.clearBitstreams();

// Now add them back in the proper order
for (Bitstream bitstream : updatedBitstreams)
{
bitstream.getBundles().remove(bundle);
bundle.addBitstream(bitstream);
bitstream.getBundles().add(bundle);
bitstreamService.update(context, bitstream);
}

//The order of the bitstreams has changed, ensure that we update the last modified of our item
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);

}
}
}

@@ -399,16 +442,15 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
bundle.getName(), getIdentifiers(context, bundle)));

// Remove bitstreams
Iterator<Bitstream> bitstreams = bundle.getBitstreams().iterator();
while (bitstreams.hasNext()) {
Bitstream bitstream = bitstreams.next();
bitstreams.remove();
List<Bitstream> bitstreams = bundle.getBitstreams();
bundle.clearBitstreams();
for (Bitstream bitstream : bitstreams) {
removeBitstream(context, bundle, bitstream);
}

Iterator<Item> items = bundle.getItems().iterator();
while (items.hasNext()) {
Item item = items.next();

List<Item> items = new LinkedList<>(bundle.getItems());
bundle.getItems().clear();
for (Item item : items) {
item.removeBundle(bundle);
}
@@ -7,16 +7,18 @@
*/
package org.dspace.content;

import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;

import javax.persistence.*;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.*;
import org.dspace.authorize.AuthorizeException;

/**
* Class representing a collection.

@@ -34,6 +36,8 @@ import java.util.List;
*/
@Entity
@Table(name="collection")
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
public class Collection extends DSpaceObject implements DSpaceObjectLegacySupport
{

@@ -83,7 +87,7 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
joinColumns = {@JoinColumn(name = "collection_id") },
inverseJoinColumns = {@JoinColumn(name = "community_id") }
)
private final List<Community> communities = new ArrayList<>();
private Set<Community> communities = new HashSet<>();

@Transient
private transient CollectionService collectionService;

@@ -263,7 +267,11 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
*/
public List<Community> getCommunities() throws SQLException
{
return communities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = communities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}

void addCommunity(Community community) {

@@ -271,7 +279,7 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
setModified();
}

void removeCommunity(Community community){
void removeCommunity(Community community) {
this.communities.remove(community);
setModified();
}

@@ -328,9 +336,10 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
return Constants.COLLECTION;
}

public void setWorkflowGroup(int step, Group g)
public void setWorkflowGroup(Context context, int step, Group g)
throws SQLException, AuthorizeException
{
getCollectionService().setWorkflowGroup(this, step, g);
getCollectionService().setWorkflowGroup(context, this, step, g);
}

@Override

@@ -345,4 +354,4 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
}
return collectionService;
}
}
}
@@ -32,6 +32,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.*;
import org.dspace.authorize.service.ResourcePolicyService;

/**
* Service implementation for the Collection object.

@@ -51,6 +52,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
protected ResourcePolicyService resourcePolicyService;
@Autowired(required = true)
protected BitstreamService bitstreamService;
@Autowired(required = true)
protected ItemService itemService;

@@ -334,30 +337,77 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
groupService.setName(g,
"COLLECTION_" + collection.getID() + "_WORKFLOW_STEP_" + step);
groupService.update(context, g);
setWorkflowGroup(collection, step, g);
setWorkflowGroup(context, collection, step, g);

authorizeService.addPolicy(context, collection, Constants.ADD, g);
}

return getWorkflowGroup(collection, step);
}

@Override
public void setWorkflowGroup(Collection collection, int step, Group group) {
public void setWorkflowGroup(Context context, Collection collection, int step, Group group)
throws SQLException, AuthorizeException
{
// we need to store the old group to be able to revoke permissions if granted before
Group oldGroup = null;
int action;

switch (step)
{
case 1:
oldGroup = collection.getWorkflowStep1();
action = Constants.WORKFLOW_STEP_1;
collection.setWorkflowStep1(group);
break;
case 2:
oldGroup = collection.getWorkflowStep2();
action = Constants.WORKFLOW_STEP_2;
collection.setWorkflowStep2(group);
break;
case 3:
oldGroup = collection.getWorkflowStep3();
action = Constants.WORKFLOW_STEP_3;
collection.setWorkflowStep3(group);
break;
default:
throw new IllegalArgumentException("Illegal step count: " + step);
}

// deal with permissions.
try
{
context.turnOffAuthorisationSystem();
// remove the policies for the old group
if (oldGroup != null)
{
Iterator<ResourcePolicy> oldPolicies =
resourcePolicyService.find(context, collection, oldGroup, action).iterator();
while (oldPolicies.hasNext())
{
resourcePolicyService.delete(context, oldPolicies.next());
}
oldPolicies = resourcePolicyService.find(context, collection, oldGroup, Constants.ADD).iterator();
while (oldPolicies.hasNext())
{
ResourcePolicy rp = oldPolicies.next();
if (rp.getRpType() == ResourcePolicy.TYPE_WORKFLOW)
{
resourcePolicyService.delete(context, rp);
}
}
}

// group can be null to delete workflow step.
// we need to grant permissions if group is not null
if (group != null)
{
authorizeService.addPolicy(context, collection, action, group, ResourcePolicy.TYPE_WORKFLOW);
authorizeService.addPolicy(context, collection, Constants.ADD, group, ResourcePolicy.TYPE_WORKFLOW);
}
} finally {
context.restoreAuthSystemState();
}
collection.setModified();
}

@Override

@@ -749,8 +799,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
while (owningCommunities.hasNext())
{
Community owningCommunity = owningCommunities.next();
owningCommunities.remove();
owningCommunity.getCollections().remove(collection);
collection.removeCommunity(owningCommunity);
owningCommunity.removeCollection(collection);
}

collectionDAO.delete(context, collection);
@@ -9,10 +9,12 @@ package org.dspace.content;

import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.log4j.Logger;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;

import javax.persistence.*;

@@ -30,6 +32,8 @@ import java.util.*;
*/
@Entity
@Table(name="community")
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
{
/** log4j category */

@@ -44,13 +48,13 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
joinColumns = {@JoinColumn(name = "parent_comm_id") },
inverseJoinColumns = {@JoinColumn(name = "child_comm_id") }
)
private final List<Community> subCommunities = new ArrayList<>();
private Set<Community> subCommunities = new HashSet<>();

@ManyToMany(fetch = FetchType.LAZY, mappedBy = "subCommunities")
private List<Community> parentCommunities = new ArrayList<>();
private Set<Community> parentCommunities = new HashSet<>();

@ManyToMany(fetch = FetchType.LAZY, mappedBy = "communities", cascade = {CascadeType.PERSIST})
private final List<Collection> collections = new ArrayList<>();
private Set<Collection> collections = new HashSet<>();

@OneToOne
@JoinColumn(name = "admin")

@@ -85,13 +89,13 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport

void addSubCommunity(Community subCommunity)
{
getSubcommunities().add(subCommunity);
subCommunities.add(subCommunity);
setModified();
}

void removeSubCommunity(Community subCommunity)
{
getSubcommunities().remove(subCommunity);
subCommunities.remove(subCommunity);
setModified();
}

@@ -140,17 +144,21 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Collection> getCollections()
{
return collections;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Collection[] output = collections.toArray(new Collection[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}

void addCollection(Collection collection)
{
getCollections().add(collection);
collections.add(collection);
}

void removeCollection(Collection collection)
{
getCollections().remove(collection);
collections.remove(collection);
}

/**

@@ -162,7 +170,11 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Community> getSubcommunities()
{
return subCommunities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = subCommunities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}

/**

@@ -173,16 +185,25 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Community> getParentCommunities()
{
return parentCommunities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = parentCommunities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}

void addParentCommunity(Community parentCommunity) {
getParentCommunities().add(parentCommunity);
parentCommunities.add(parentCommunity);
}

void clearParentCommunities(){
this.parentCommunities.clear();
this.parentCommunities = null;
parentCommunities.clear();
}

public void removeParentCommunity(Community parentCommunity)
{
parentCommunities.remove(parentCommunity);
setModified();
}

/**
@@ -452,12 +452,9 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
ArrayList<String> removedIdentifiers = getIdentifiers(context, childCommunity);
String removedHandle = childCommunity.getHandle();
UUID removedId = childCommunity.getID();

rawDelete(context, childCommunity);

childCommunity.getParentCommunities().remove(parentCommunity);
parentCommunity.removeSubCommunity(childCommunity);

log.info(LogManager.getHeader(context, "remove_subcommunity",
"parent_comm_id=" + parentCommunity.getID() + ",child_comm_id=" + childCommunity.getID()));

@@ -492,7 +489,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
Iterator<Community> subcommunities = community.getSubcommunities().iterator();
while (subcommunities.hasNext()) {
Community subCommunity = subcommunities.next();
subcommunities.remove();
community.removeSubCommunity(subCommunity);
delete(context, subCommunity);
}
// now let the parent remove the community

@@ -535,7 +532,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
while (collections.hasNext())
{
Collection collection = collections.next();
collections.remove();
community.removeCollection(collection);
removeCollection(context, community, collection);
}
// delete subcommunities

@@ -544,7 +541,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
while (subCommunities.hasNext())
{
Community subComm = subCommunities.next();
subCommunities.remove();
community.removeSubCommunity(subComm);
delete(context, subComm);
}

@@ -553,9 +550,16 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp

// Remove any Handle
handleService.unbindHandle(context, community);

// Remove the parent-child relationship for the community we want ot delete
Community parent = (Community) getParentObject(context, community);
if (parent != null) {
community.removeParentCommunity(parent);
parent.removeSubCommunity(community);
}

Group g = community.getAdministrators();

// Delete community row
communityDAO.delete(context, community);
@@ -7,17 +7,18 @@
*/
package org.dspace.content;

import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.hibernate.annotations.Sort;
import org.hibernate.annotations.SortType;
import org.hibernate.proxy.HibernateProxyHelper;

import javax.persistence.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.*;

/**
* Class representing an item in DSpace.

@@ -78,7 +79,7 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
joinColumns = {@JoinColumn(name = "item_id") },
inverseJoinColumns = {@JoinColumn(name = "collection_id") }
)
private final List<Collection> collections = new ArrayList<>();
private final Set<Collection> collections = new HashSet<>();

@ManyToMany(fetch = FetchType.LAZY, mappedBy = "items")
private final List<Bundle> bundles = new ArrayList<>();

@@ -224,23 +225,31 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
}

/**
* Get the collections this item is in. The order is indeterminate.
* Get the collections this item is in. The order is sorted ascending by collection name.
*
* @return the collections this item is in, if any.
*/
public List<Collection> getCollections()
{
return collections;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Collection[] output = collections.toArray(new Collection[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}

void addCollection(Collection collection)
{
getCollections().add(collection);
collections.add(collection);
}

void removeCollection(Collection collection)
{
getCollections().remove(collection);
collections.remove(collection);
}

public void clearCollections(){
collections.clear();
}

public Collection getTemplateItemOf() {

@@ -262,6 +271,31 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
return bundles;
}

/**
* Get the bundles matching a bundle name (name corresponds roughly to type)
*
* @param name
* name of bundle (ORIGINAL/TEXT/THUMBNAIL)
*
* @return the bundles in an unordered array
*/
public List<Bundle> getBundles(String name)
{
List<Bundle> matchingBundles = new ArrayList<Bundle>();

// now only keep bundles with matching names
List<Bundle> bunds = getBundles();
for (Bundle bundle : bunds)
{
if (name.equals(bundle.getName()))
{
matchingBundles.add(bundle);
}
}

return matchingBundles;
}

/**
* Add a bundle to the item, should not be made public since we don't want to skip business logic
* @param bundle the bundle to be added

@@ -289,35 +323,35 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
* @return <code>true</code> if object passed in represents the same item
* as this object
*/
@Override
public boolean equals(Object obj)
{
if (obj == null)
{
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (this.getClass() != objClass)
{
return false;
}
final Item otherItem = (Item) obj;
if (!this.getID().equals(otherItem.getID()))
{
return false;
}
@Override
public boolean equals(Object obj)
{
if (obj == null)
{
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (this.getClass() != objClass)
{
return false;
}
final Item otherItem = (Item) obj;
if (!this.getID().equals(otherItem.getID()))
{
return false;
}

return true;
}
return true;
}

@Override
public int hashCode()
{
int hash = 5;
hash += 71 * hash + getType();
hash += 71 * hash + getID().hashCode();
return hash;
}
@Override
public int hashCode()
{
int hash = 5;
hash += 71 * hash + getType();
hash += 71 * hash + getID().hashCode();
return hash;
}

/**
* return type found in Constants
@@ -237,13 +237,25 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return itemDAO.findAllByCollection(context, collection);
}

@Override
public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException {
return itemDAO.findAllByCollection(context, collection, limit, offset);
}

@Override
public Iterator<Item> findInArchiveOrWithdrawnDiscoverableModifiedSince(Context context, Date since)
throws SQLException
{
return itemDAO.findAll(context, true, true, true, since);
}

@Override
public Iterator<Item> findInArchiveOrWithdrawnNonDiscoverableModifiedSince(Context context, Date since)
throws SQLException
{
return itemDAO.findAll(context, true, true, false, since);
}

@Override
public void updateLastModified(Context context, Item item) throws SQLException, AuthorizeException {
item.setLastModified(new Date());

@@ -651,7 +663,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
}

//Only clear collections after we have removed everything else from the item
item.getCollections().clear();
item.clearCollections();
item.setOwningCollection(null);

// Finally remove item row

@@ -1189,6 +1201,11 @@ prevent the generation of resource policy entry values with null dspace_object a
return itemDAO.countItems(context, collection, true, false);
}

@Override
public int countAllItems(Context context, Collection collection) throws SQLException {
return itemDAO.countItems(context, collection, true, false) + itemDAO.countItems(context, collection, false, true);
}

@Override
public int countItems(Context context, Community community) throws SQLException {
// First we need a list of all collections under this community in the hierarchy

@@ -1197,6 +1214,15 @@ prevent the generation of resource policy entry values with null dspace_object a
// Now, lets count unique items across that list of collections
return itemDAO.countItems(context, collections, true, false);
}

@Override
public int countAllItems(Context context, Community community) throws SQLException {
// First we need a list of all collections under this community in the hierarchy
List<Collection> collections = communityService.getAllCollections(context, community);

// Now, lets count unique items across that list of collections
return itemDAO.countItems(context, collections, true, false) + itemDAO.countItems(context, collections, false, true);
}

@Override
protected void getAuthoritiesAndConfidences(String fieldKey, Collection collection, List<String> values, List<String> authorities, List<Integer> confidences, int i) {
@@ -21,6 +21,7 @@ import org.springframework.beans.factory.annotation.Autowired;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;

/**

@@ -98,7 +99,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
}

@Override
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
return metadataValueDAO.findByValueLike(context, value);
}
@@ -12,7 +12,9 @@ import org.dspace.content.service.SiteService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.hibernate.annotations.CacheConcurrencyStrategy;

import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.Table;
import javax.persistence.Transient;

@@ -22,6 +24,8 @@ import javax.persistence.Transient;
* By default, the handle suffix "0" represents the Site, e.g. "1721.1/0"
*/
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
@Table(name = "site")
public class Site extends DSpaceObject
{
@@ -93,11 +93,8 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
Item item = itemService.create(context, workspaceItem);
item.setSubmitter(context.getCurrentUser());

// Now create the policies for the submitter and workflow
// users to modify item and contents
// Now create the policies for the submitter to modify item and contents
// contents = bitstreams, bundles
// FIXME: icky hardcoded workflow steps
workflowService.addInitialWorkspaceItemPolicies(context, workspaceItem);
// read permission
authorizeService.addPolicy(context, item, Constants.READ, item.getSubmitter(), ResourcePolicy.TYPE_SUBMISSION);
// write permission
@@ -168,7 +168,11 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic
{
init();
log.debug("Getting matches for '" + text + "'");
String xpathExpression = String.format(xpathTemplate, text.replaceAll("'", "&apos;").toLowerCase());
String xpathExpression = "";
String[] textHierarchy = text.split(hierarchyDelimiter, -1);
for (int i = 0; i < textHierarchy.length; i++) {
xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
}
XPath xpath = XPathFactory.newInstance().newXPath();
Choice[] choices;
try {
@@ -7,10 +7,6 @@
*/
package org.dspace.content.authority;

import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.rest.RestSource;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;

@@ -18,6 +14,10 @@ import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.Collection;
import org.dspace.core.ConfigurationManager;

@@ -38,7 +38,8 @@ import java.util.Map;
public class SolrAuthority implements ChoiceAuthority {

private static final Logger log = Logger.getLogger(SolrAuthority.class);
protected RestSource source = DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("AuthoritySource", RestSource.class);
protected SolrAuthorityInterface source = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("AuthoritySource", SolrAuthorityInterface.class);
protected boolean externalResults = false;
protected final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.comparator;

import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.DSpaceObject;

import java.util.Comparator;

public class NameAscendingComparator implements Comparator<DSpaceObject>{

@Override
public int compare(DSpaceObject dso1, DSpaceObject dso2) {
if (dso1 == dso2){
return 0;
}else if (dso1 == null){
return -1;
}else if (dso2 == null){
return 1;
}else {
String name1 = StringUtils.trimToEmpty(dso1.getName());
String name2 = StringUtils.trimToEmpty(dso2.getName());

//When two DSO's have the same name, use their UUID to put them in an order
if(name1.equals(name2)) {
return ObjectUtils.compare(dso1.getID(), dso2.getID());
} else {
return name1.compareToIgnoreCase(name2);
}
}
}

}
@@ -32,6 +32,8 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.jdom.Attribute;
import org.jdom.Document;
import org.jdom.Element;

@@ -89,6 +91,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
protected final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

/**
* Fill in the plugin alias table from DSpace configuration entries

@@ -455,7 +458,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
{
List<MockMetadataValue> metadata = new ArrayList<>();

String identifier_uri = "http://hdl.handle.net/"
String identifier_uri = handleService.getCanonicalPrefix()
+ site.getHandle();
String title = site.getName();
String url = site.getURL();

@@ -493,7 +496,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
String description = communityService.getMetadata(community, "introductory_text");
String description_abstract = communityService.getMetadata(community, "short_description");
String description_table = communityService.getMetadata(community,"side_bar_text");
String identifier_uri = "http://hdl.handle.net/"
String identifier_uri = handleService.getCanonicalPrefix()
+ community.getHandle();
String rights = communityService.getMetadata(community,"copyright_text");
String title = communityService.getMetadata(community,"name");

@@ -543,7 +546,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
String description = collectionService.getMetadata(collection, "introductory_text");
String description_abstract = collectionService.getMetadata(collection, "short_description");
String description_table = collectionService.getMetadata(collection, "side_bar_text");
String identifier_uri = "http://hdl.handle.net/"
String identifier_uri = handleService.getCanonicalPrefix()
+ collection.getHandle();
String provenance = collectionService.getMetadata(collection, "provenance_description");
String rights = collectionService.getMetadata(collection, "copyright_text");
@@ -179,7 +179,7 @@ public class XSLTIngestionCrosswalk
}
try
{
JDOMSource source = new JDOMSource(new Document((Element)root.cloneContent()));
JDOMSource source = new JDOMSource(new Document((Element)root.clone()));
JDOMResult result = new JDOMResult();
xform.transform(source, result);
Document dimDoc = result.getDocument();
@@ -57,6 +57,8 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item>

public Iterator<Item> findAllByCollection(Context context, Collection collection) throws SQLException;

public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException;

/**
* Count number of items in a given collection
* @param context context
@@ -13,6 +13,7 @@ import org.dspace.core.Context;
import org.dspace.core.GenericDAO;

import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;

/**

@@ -26,7 +27,7 @@ public interface MetadataValueDAO extends GenericDAO<MetadataValue> {

public List<MetadataValue> findByField(Context context, MetadataField fieldId) throws SQLException;

public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException;

public void deleteByMetadataField(Context context, MetadataField metadataField) throws SQLException;
@@ -121,6 +121,8 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
Restrictions.eq("resourcePolicy.eperson", ePerson),
actionQuery
));
criteria.setCacheable(true);

return list(criteria);
}

@@ -160,6 +162,8 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
query.append(" AND rp.epersonGroup.id IN (select g.id from Group g where (from EPerson e where e.id = :eperson_id) in elements(epeople))");
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter("eperson_id", ePerson.getID());
hibernateQuery.setCacheable(true);

return list(hibernateQuery);
@@ -91,6 +91,7 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme

Query query = createQuery(context, queryBuilder.toString());
query.setParameter(sortField.toString(), sortField.getID());
query.setCacheable(true);

return findMany(context, query);
}

@@ -129,6 +130,8 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
Restrictions.eq("resourcePolicy.eperson", ePerson),
actionQuery
));
criteria.setCacheable(true);

return list(criteria);
}

@@ -164,6 +167,8 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
query.append(" AND rp.epersonGroup.id IN (select g.id from Group g where (from EPerson e where e.id = :eperson_id) in elements(epeople))");
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter("eperson_id", ePerson.getID());
hibernateQuery.setCacheable(true);

return list(hibernateQuery);
}
@@ -104,7 +104,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
         addMetadataLeftJoin(query, Item.class.getSimpleName().toLowerCase(), Collections.singletonList(metadataField));
         query.append(" WHERE item.inArchive = :in_archive");
         query.append(" AND item.submitter =:submitter");
-        addMetadataSortQuery(query, Collections.singletonList(metadataField), null);
+        //submissions should sort in reverse by date by default
+        addMetadataSortQuery(query, Collections.singletonList(metadataField), null, Collections.singletonList("desc"));
 
         Query hibernateQuery = createQuery(context, query.toString());
         hibernateQuery.setParameter(metadataField.toString(), metadataField.getID());
@@ -232,6 +233,24 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
         return iterate(query);
     }
 
+    @Override
+    public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException {
+        Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c");
+        query.setParameter("collection", collection);
+
+        if(offset != null)
+        {
+            query.setFirstResult(offset);
+        }
+        if(limit != null)
+        {
+            query.setMaxResults(limit);
+        }
+
+        return iterate(query);
+    }
+
+
     @Override
     public int countItems(Context context, Collection collection, boolean includeArchived, boolean includeWithdrawn) throws SQLException {
         Query query = createQuery(context, "select count(i) from Item i join i.collections c WHERE :collection IN c AND i.inArchive=:in_archive AND i.withdrawn=:withdrawn");
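A hedged sketch of how the new limit/offset parameters can drive batched iteration over a large collection (batch size, class and method names are illustrative only, not DSpace API):

    import java.sql.SQLException;
    import java.util.Iterator;

    import org.dspace.content.Collection;
    import org.dspace.content.Item;
    import org.dspace.content.dao.ItemDAO;
    import org.dspace.core.Context;

    public class BatchedCollectionScan {
        // Walks a collection 100 items at a time; a short batch signals the end.
        static void scan(ItemDAO dao, Context context, Collection collection) throws SQLException {
            final int batchSize = 100;
            int offset = 0;
            while (true) {
                Iterator<Item> batch = dao.findAllByCollection(context, collection, batchSize, offset);
                int seen = 0;
                while (batch.hasNext()) {
                    batch.next();   // process the item here
                    seen++;
                }
                if (seen < batchSize) {
                    break;
                }
                offset += seen;
            }
        }
    }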
@@ -18,6 +18,7 @@ import org.hibernate.Query;
 import org.hibernate.criterion.Restrictions;
 
 import java.sql.SQLException;
+import java.util.Iterator;
 import java.util.List;
 
 /**
@@ -48,14 +49,14 @@ public class MetadataValueDAOImpl extends AbstractHibernateDAO<MetadataValue> im
     }
 
     @Override
-    public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
-        Criteria criteria = createCriteria(context, MetadataValue.class);
-        criteria.add(
-                Restrictions.like("value", "%" + value + "%")
-        );
-        criteria.setFetchMode("metadataField", FetchMode.JOIN);
-
-        return list(criteria);
+    public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
+        String queryString = "SELECT m FROM MetadataValue m JOIN m.metadataField f " +
+                "WHERE m.value like concat('%', concat(:searchString,'%')) ORDER BY m.id ASC";
+
+        Query query = createQuery(context, queryString);
+        query.setString("searchString", value);
+
+        return iterate(query);
     }
 
     @Override
@@ -32,6 +32,7 @@ public class SiteDAOImpl extends AbstractHibernateDAO<Site> implements SiteDAO
     @Override
     public Site findSite(Context context) throws SQLException {
         Criteria criteria = createCriteria(context, Site.class);
+        criteria.setCacheable(true);
         return uniqueResult(criteria);
     }
 }
@@ -27,6 +27,22 @@ public interface BitstreamService extends DSpaceObjectService<Bitstream>, DSpace
 
     public List<Bitstream> findAll(Context context) throws SQLException;
 
+    /**
+     * Clone the given bitstream by firstly creating a new bitstream, with a new ID.
+     * Then set the internal identifier, file size, checksum, and
+     * checksum algorithm as same as the given bitstream.
+     * This allows multiple bitstreams to share the same internal identifier of assets.
+     * An example of such a use case scenario is versioning.
+     *
+     * @param context
+     *            DSpace context object
+     * @param bitstream
+     *            Bitstream to be cloned
+     * @return the clone
+     * @throws SQLException if database error
+     */
+    public Bitstream clone(Context context, Bitstream bitstream) throws SQLException;
+
     /**
      * Create a new bitstream, with a new ID. The checksum and file size are
      * calculated. No authorization checks are made in this method.
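A sketch of the versioning use case the new javadoc describes: the clone receives a new ID but points at the same stored asset, so no file content is copied. Service lookup follows the usual factory pattern; the wrapper method name is made up and error handling is omitted:

    import java.sql.SQLException;

    import org.dspace.content.Bitstream;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.BitstreamService;
    import org.dspace.core.Context;

    public class BitstreamCloneSketch {
        // New database row, same underlying asset: both bitstreams share the internal identifier.
        static Bitstream cloneForNewVersion(Context context, Bitstream original) throws SQLException {
            BitstreamService bitstreamService =
                    ContentServiceFactory.getInstance().getBitstreamService();
            return bitstreamService.clone(context, original);
        }
    }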
@@ -62,7 +78,7 @@ public interface BitstreamService extends DSpaceObjectService<Bitstream>, DSpace
      * @throws AuthorizeException if authorization error
      */
     public Bitstream create(Context context, Bundle bundle, InputStream is) throws IOException, SQLException, AuthorizeException;
 
     /**
      * Register a new bitstream, with a new ID. The checksum and file size
      * are calculated. The newly created bitstream has the "unknown"
@@ -161,7 +161,8 @@ public interface CollectionService extends DSpaceObjectService<Collection>, DSpa
      * @param group
      *            the new workflow group, or <code>null</code>
      */
-    public void setWorkflowGroup(Collection collection, int step, Group group);
+    public void setWorkflowGroup(Context context, Collection collection, int step, Group group)
+            throws SQLException, AuthorizeException;
 
     /**
      * Get the the workflow group corresponding to a particular workflow step.
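Callers of setWorkflowGroup now have to pass the Context and handle the new checked exceptions. A minimal caller sketch (the step number and wrapper method name are illustrative):

    import java.sql.SQLException;

    import org.dspace.authorize.AuthorizeException;
    import org.dspace.content.Collection;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.CollectionService;
    import org.dspace.core.Context;
    import org.dspace.eperson.Group;

    public class WorkflowGroupSketch {
        // Assign a reviewer group to workflow step 1 using the new signature.
        static void assignStepOneReviewers(Context context, Collection collection, Group reviewers)
                throws SQLException, AuthorizeException {
            CollectionService collectionService =
                    ContentServiceFactory.getInstance().getCollectionService();
            collectionService.setWorkflowGroup(context, collection, 1, reviewers);
        }
    }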
Some files were not shown because too many files have changed in this diff