Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 01:54:22 +00:00)

Compare commits: dspace-9.1...61f2695b83 (428 commits)
[Commit list (Author | SHA1 | Date) not recoverable from this mirror view: the table rendered only bare commit hashes, with avatars and the Author/Date columns empty. The 428 commits in this range run from 61f2695b83 (newest) down to 31d36e7abf.]
.github/workflows/codescan.yml (vendored): 6 changed lines

@@ -47,7 +47,7 @@ jobs:
       # Initializes the CodeQL tools for scanning.
       # https://github.com/github/codeql-action
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           # Codescan Javascript as well since a few JS files exist in REST API's interface
           languages: java, javascript
@@ -56,8 +56,8 @@ jobs:
       # NOTE: Based on testing, this autobuild process works well for DSpace. A custom
       # DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v2
+        uses: github/codeql-action/autobuild@v3

       # Perform GitHub Code Scanning.
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
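Taken together, the bumped steps follow the standard CodeQL v3 sequence (init, autobuild, analyze). A minimal sketch of that job shape, with the job name, checkout step, and permissions block added here for illustration rather than copied from codescan.yml:

```yaml
# Minimal CodeQL v3 job shape; only the three github/codeql-action steps and the
# language list come from the diff above, the rest is an illustrative assumption.
jobs:
  analyze:
    runs-on: ubuntu-latest
    permissions:
      security-events: write  # needed so the analyze step can upload results
    steps:
      - uses: actions/checkout@v4
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: java, javascript
      - name: Autobuild
        uses: github/codeql-action/autobuild@v3
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
```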
.github/workflows/docker.yml (vendored): 13 changed lines

@@ -219,6 +219,19 @@ jobs:
           result=$(wget -O- -q http://127.0.0.1:8080/server/api/core/collections)
           echo "$result"
           echo "$result" | grep -oE "\"Dog in Yard\","
+      # Verify basic backend logging is working.
+      # 1. Access the top communities list. Verify that the "Before request" INFO statement is logged
+      # 2. Access an invalid endpoint (and ignore 404 response). Verify that a "status:404" WARN statement is logged
+      - name: Verify backend is logging properly
+        run: |
+          wget -O/dev/null -q http://127.0.0.1:8080/server/api/core/communities/search/top
+          logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
+          echo "$logs"
+          echo "$logs" | grep -o "Before request \[GET /server/api/core/communities/search/top\]"
+          wget -O/dev/null -q http://127.0.0.1:8080/server/api/does/not/exist || true
+          logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
+          echo "$logs"
+          echo "$logs" | grep -o "status:404 exception: The repository type does.not was not found"
       # Verify Handle Server can be stared and is working properly
       # 1. First generate the "[dspace]/handle-server" folder with the sitebndl.zip
       # 2. Start the Handle Server (and wait 20 seconds to let it start up)
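The new step is a plain "hit an endpoint, then grep the most recent container logs" assertion. The same pattern generalizes; a sketch with the endpoint, compose service, and expected message left as placeholders (they are not values from docker.yml):

```yaml
# Generic log-assertion step; ENDPOINT, SERVICE and EXPECTED_MESSAGE are
# placeholder environment variables, not values defined in this repository.
- name: Verify an expected log statement is emitted
  run: |
    # Request the endpoint; "|| true" keeps the step alive when an error response is expected.
    wget -O/dev/null -q "http://127.0.0.1:8080/${ENDPOINT}" || true
    # Inspect only the last few log lines so earlier requests cannot satisfy the check by accident.
    logs=$(docker compose -f docker-compose.yml logs -n 5 "${SERVICE}")
    echo "$logs"
    # grep exits non-zero (failing the step) when the message is missing.
    echo "$logs" | grep -o "${EXPECTED_MESSAGE}"
```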
.github/workflows/reusable-docker-build.yml (vendored): 15 changed lines

@@ -73,7 +73,7 @@ env:
   REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }}
   # Current DSpace maintenance branch (and architecture) which is deployed to demo.dspace.org / sandbox.dspace.org
   # (NOTE: No deployment branch specified for sandbox.dspace.org as it uses the default_branch)
-  DEPLOY_DEMO_BRANCH: 'dspace-8_x'
+  DEPLOY_DEMO_BRANCH: 'dspace-9_x'
   DEPLOY_SANDBOX_BRANCH: 'main'
   DEPLOY_ARCH: 'linux/amd64'
   # Registry used during building of Docker images. (All images are later copied to docker.io registry)
@@ -87,17 +87,16 @@ jobs:
       matrix:
         # Architectures / Platforms for which we will build Docker images
         arch: [ 'linux/amd64', 'linux/arm64' ]
-        os: [ ubuntu-latest ]
         isPr:
           - ${{ github.event_name == 'pull_request' }}
         # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
         # The below exclude therefore ensures we do NOT build ARM64 for PRs.
         exclude:
           - isPr: true
-            os: ubuntu-latest
             arch: linux/arm64

-    runs-on: ${{ matrix.os }}
+    # If ARM64, then use the Ubuntu ARM64 runner. Otherwise, use the Ubuntu AMD64 runner
+    runs-on: ${{ matrix.arch == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}

     steps:
       # This step converts the slashes in the "arch" matrix values above into dashes & saves to env.ARCH_NAME
@@ -123,10 +122,6 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      # https://github.com/docker/setup-qemu-action
-      - name: Set up QEMU emulation to build for multiple architectures
-        uses: docker/setup-qemu-action@v3
-
       # https://github.com/docker/setup-buildx-action
       - name: Setup Docker Buildx
         uses: docker/setup-buildx-action@v3
@@ -170,7 +165,7 @@ jobs:
           # Use GitHub cache to load cached Docker images and cache the results of this build
           # This decreases the number of images we need to fetch from DockerHub
           cache-from: type=gha,scope=${{ inputs.build_id }}
-          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
+          cache-to: type=gha,scope=${{ inputs.build_id }},mode=min

       # Export the digest of Docker build locally
       - name: Export Docker build digest
@@ -222,7 +217,7 @@ jobs:
           # Use GitHub cache to load cached Docker images and cache the results of this build
           # This decreases the number of images we need to fetch from DockerHub
           cache-from: type=gha,scope=${{ inputs.build_id }}
-          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
+          cache-to: type=gha,scope=${{ inputs.build_id }},mode=min
           # Export image to a local TAR file
           outputs: type=docker,dest=/tmp/${{ inputs.build_id }}.tar
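Two of the changes above are worth reading together: each architecture now builds natively on a matching runner (the `&& ... || ...` expression works as a ternary because 'ubuntu-24.04-arm' is a non-empty string), which is presumably why the QEMU emulation step could be dropped, and `mode=min` caches only the layers of the exported image rather than every intermediate layer. A stripped-down sketch of the pattern, with the job name, checkout, and build step invented for illustration:

```yaml
# Illustrative fragment; only the matrix/exclude/runs-on pattern and the
# cache-to mode mirror the diff above, the rest is assumed.
jobs:
  docker-build:
    strategy:
      matrix:
        arch: [ 'linux/amd64', 'linux/arm64' ]
        isPr:
          - ${{ github.event_name == 'pull_request' }}
        exclude:
          # PR builds are only a sanity check, so skip the ARM64 leg there.
          - isPr: true
            arch: linux/arm64
    # "condition && a || b" behaves like a ternary here because 'ubuntu-24.04-arm' is truthy.
    runs-on: ${{ matrix.arch == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
    steps:
      - uses: actions/checkout@v4
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v5
        with:
          platforms: ${{ matrix.arch }}
          cache-from: type=gha,scope=example-build
          # mode=min keeps the GitHub Actions cache small by exporting only final-image layers.
          cache-to: type=gha,scope=example-build,mode=min
```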
@@ -21,18 +21,18 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
Apache Software License, Version 2.0:

* Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net)
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.780 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.780 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.780 - https://aws.amazon.com/sdkforjava)
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.780 - https://aws.amazon.com/sdkforjava)
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.785 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.785 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.785 - https://aws.amazon.com/sdkforjava)
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.785 - https://aws.amazon.com/sdkforjava)
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.19.0 - https://drewnoakes.com/code/exif/)
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
* Internet Time Utility (com.ethlo.time:itu:1.7.0 - https://github.com/ethlo/itu)
* ClassMate (com.fasterxml:classmate:1.7.0 - https://github.com/FasterXML/java-classmate)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.18.2 - https://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.18.2 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.18.2 - https://github.com/FasterXML/jackson)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.19.1 - https://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.19.1 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.19.1 - https://github.com/FasterXML/jackson)
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.17.2 - https://github.com/FasterXML/jackson-dataformats-binary)
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.15.2 - https://github.com/FasterXML/jackson-dataformats-binary)
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.14.0 - https://github.com/FasterXML/jackson-dataformats-text)
@@ -57,22 +57,22 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.35.2 - https://github.com/googleapis/google-api-java-client/google-api-client)
* Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.2 - http://findbugs.sourceforge.net/)
* Gson (com.google.code.gson:gson:2.10.1 - https://github.com/google/gson/gson)
* Gson (com.google.code.gson:gson:2.11.0 - https://github.com/google/gson)
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.21.1 - https://errorprone.info/error_prone_annotations)
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess)
* Guava: Google Core Libraries for Java (com.google.guava:guava:32.1.3-jre - https://github.com/google/guava)
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.45.3 - https://github.com/googleapis/google-http-java-client/google-http-client)
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.47.0 - https://github.com/googleapis/google-http-java-client/google-http-client)
* Apache HTTP transport v2 for the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-apache-v2:1.42.0 - https://github.com/googleapis/google-http-java-client/google-http-client-apache-v2)
* GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.43.3 - https://github.com/googleapis/google-http-java-client/google-http-client-gson)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.45.3 - https://github.com/googleapis/google-http-java-client/google-http-client-jackson2)
* GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.47.0 - https://github.com/googleapis/google-http-java-client/google-http-client-gson)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.47.0 - https://github.com/googleapis/google-http-java-client/google-http-client-jackson2)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.37.0 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.39.0 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap)
* libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/)
* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.5 - https://jackcess.sourceforge.io)
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.2 - http://jackcessencrypt.sf.net)
* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.8 - https://jackcess.sourceforge.io)
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.3 - http://jackcessencrypt.sf.net)
* json-path (com.jayway.jsonpath:json-path:2.9.0 - https://github.com/jayway/JsonPath)
* json-path-assert (com.jayway.jsonpath:json-path-assert:2.9.0 - https://github.com/jayway/JsonPath)
* Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor)
@@ -81,11 +81,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* JsonSchemaValidator (com.networknt:json-schema-validator:1.0.76 - https://github.com/networknt/json-schema-validator)
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:9.28 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
* opencsv (com.opencsv:opencsv:5.10 - http://opencsv.sf.net)
* opencsv (com.opencsv:opencsv:5.11.1 - http://opencsv.sf.net)
* java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst)
* rome (com.rometools:rome:1.19.0 - http://rometools.com/rome)
* rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules)
* rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils)
* mockwebserver (com.squareup.okhttp3:mockwebserver:4.12.0 - https://square.github.io/okhttp/)
* okhttp (com.squareup.okhttp3:okhttp:4.12.0 - https://square.github.io/okhttp/)
* okio (com.squareup.okio:okio:3.6.0 - https://github.com/square/okio/)
* okio (com.squareup.okio:okio-jvm:3.6.0 - https://github.com/square/okio/)
* T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest)
* config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config)
* ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config)
@@ -98,15 +102,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging)
* JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk)
* SparseBitSet (com.zaxxer:SparseBitSet:1.3 - https://github.com/brettwooldridge/SparseBitSet)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.10.0 - https://commons.apache.org/proper/commons-beanutils)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.11.0 - https://commons.apache.org/proper/commons-beanutils)
* Apache Commons CLI (commons-cli:commons-cli:1.9.0 - https://commons.apache.org/proper/commons-cli/)
* Apache Commons Codec (commons-codec:commons-codec:1.17.2 - https://commons.apache.org/proper/commons-codec/)
* Apache Commons Codec (commons-codec:commons-codec:1.18.0 - https://commons.apache.org/proper/commons-codec/)
* Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
* Commons Digester (commons-digester:commons-digester:2.1 - http://commons.apache.org/digester/)
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.5 - https://commons.apache.org/proper/commons-fileupload/)
* Apache Commons IO (commons-io:commons-io:2.18.0 - https://commons.apache.org/proper/commons-io/)
* Commons FileUpload (commons-fileupload:commons-fileupload:1.2.1 - http://commons.apache.org/fileupload/)
* Apache Commons IO (commons-io:commons-io:2.19.0 - https://commons.apache.org/proper/commons-io/)
* Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
* Apache Commons Logging (commons-logging:commons-logging:1.3.4 - https://commons.apache.org/proper/commons-logging/)
* Apache Commons Logging (commons-logging:commons-logging:1.3.5 - https://commons.apache.org/proper/commons-logging/)
* Apache Commons Validator (commons-validator:commons-validator:1.9.0 - http://commons.apache.org/proper/commons-validator/)
* GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson)
* OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu)
@@ -115,30 +119,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9)
* Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx)
* JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm)
* io.grpc:grpc-api (io.grpc:grpc-api:1.69.0 - https://github.com/grpc/grpc-java)
* io.grpc:grpc-context (io.grpc:grpc-context:1.69.0 - https://github.com/grpc/grpc-java)
* io.grpc:grpc-api (io.grpc:grpc-api:1.73.0 - https://github.com/grpc/grpc-java)
* io.grpc:grpc-context (io.grpc:grpc-context:1.73.0 - https://github.com/grpc/grpc-java)
* micrometer-core (io.micrometer:micrometer-core:1.9.17 - https://github.com/micrometer-metrics/micrometer)
* Netty/Buffer (io.netty:netty-buffer:4.1.117.Final - https://netty.io/netty-buffer/)
* Netty/Buffer (io.netty:netty-buffer:4.1.99.Final - https://netty.io/netty-buffer/)
* Netty/Codec (io.netty:netty-codec:4.1.117.Final - https://netty.io/netty-codec/)
* Netty/Buffer (io.netty:netty-buffer:4.2.2.Final - https://netty.io/netty-buffer/)
* Netty/Codec (io.netty:netty-codec:4.1.99.Final - https://netty.io/netty-codec/)
* Netty/Codec (io.netty:netty-codec:4.2.2.Final - https://netty.io/netty-codec/)
* Netty/Codec/Base (io.netty:netty-codec-base:4.2.2.Final - https://netty.io/netty-codec-base/)
* Netty/Codec/Compression (io.netty:netty-codec-compression:4.2.2.Final - https://netty.io/netty-codec-compression/)
* Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.86.Final - https://netty.io/netty-codec-http/)
* Netty/Codec/HTTP2 (io.netty:netty-codec-http2:4.1.86.Final - https://netty.io/netty-codec-http2/)
* Netty/Codec/Marshalling (io.netty:netty-codec-marshalling:4.2.2.Final - https://netty.io/netty-codec-marshalling/)
* Netty/Codec/Protobuf (io.netty:netty-codec-protobuf:4.2.2.Final - https://netty.io/netty-codec-protobuf/)
* Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.86.Final - https://netty.io/netty-codec-socks/)
* Netty/Common (io.netty:netty-common:4.1.117.Final - https://netty.io/netty-common/)
* Netty/Common (io.netty:netty-common:4.1.99.Final - https://netty.io/netty-common/)
* Netty/Handler (io.netty:netty-handler:4.1.117.Final - https://netty.io/netty-handler/)
* Netty/Common (io.netty:netty-common:4.2.2.Final - https://netty.io/netty-common/)
* Netty/Handler (io.netty:netty-handler:4.1.99.Final - https://netty.io/netty-handler/)
* Netty/Handler (io.netty:netty-handler:4.2.2.Final - https://netty.io/netty-handler/)
* Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.86.Final - https://netty.io/netty-handler-proxy/)
* Netty/Resolver (io.netty:netty-resolver:4.1.99.Final - https://netty.io/netty-resolver/)
* Netty/TomcatNative [BoringSSL - Static] (io.netty:netty-tcnative-boringssl-static:2.0.56.Final - https://github.com/netty/netty-tcnative/netty-tcnative-boringssl-static/)
* Netty/TomcatNative [OpenSSL - Classes] (io.netty:netty-tcnative-classes:2.0.56.Final - https://github.com/netty/netty-tcnative/netty-tcnative-classes/)
* Netty/Transport (io.netty:netty-transport:4.1.117.Final - https://netty.io/netty-transport/)
* Netty/Transport (io.netty:netty-transport:4.1.99.Final - https://netty.io/netty-transport/)
* Netty/Transport (io.netty:netty-transport:4.2.2.Final - https://netty.io/netty-transport/)
* Netty/Transport/Classes/Epoll (io.netty:netty-transport-classes-epoll:4.1.99.Final - https://netty.io/netty-transport-classes-epoll/)
* Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.99.Final - https://netty.io/netty-transport-native-epoll/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.117.Final - https://netty.io/netty-transport-native-unix-common/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.99.Final - https://netty.io/netty-transport-native-unix-common/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.2.2.Final - https://netty.io/netty-transport-native-unix-common/)
* OpenCensus (io.opencensus:opencensus-api:0.31.1 - https://github.com/census-instrumentation/opencensus-java)
* OpenCensus (io.opencensus:opencensus-contrib-http-util:0.31.1 - https://github.com/census-instrumentation/opencensus-java)
* OpenTracing API (io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api)
@@ -167,16 +175,17 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* JSR107 API and SPI (javax.cache:cache-api:1.1.1 - https://github.com/jsr107/jsr107spec)
* javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/)
* jdbm (jdbm:jdbm:1.0 - no url defined)
* Joda-Time (joda-time:joda-time:2.13.0 - https://www.joda.org/joda-time/)
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.16.1 - https://bytebuddy.net/byte-buddy)
* Joda-Time (joda-time:joda-time:2.14.0 - https://www.joda.org/joda-time/)
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy)
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.12.18 - https://bytebuddy.net/byte-buddy)
* Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent)
* eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties)
* json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.36.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core)
* "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.5.0 - https://urielch.github.io/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.5.1 - https://urielch.github.io/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.5.2 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.5.0 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.5.1 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.5.2 - https://urielch.github.io/)
* Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core)
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
* Apache Ant Core (org.apache.ant:ant:1.10.15 - https://ant.apache.org/)
@@ -186,18 +195,18 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.35.0 - https://calcite.apache.org)
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.23.0 - https://calcite.apache.org/avatica)
* Apache Calcite Avatica Metrics (org.apache.calcite.avatica:avatica-metrics:1.23.0 - https://calcite.apache.org/avatica)
* Apache Commons Collections (org.apache.commons:commons-collections4:4.4 - https://commons.apache.org/proper/commons-collections/)
* Apache Commons Collections (org.apache.commons:commons-collections4:4.5.0 - https://commons.apache.org/proper/commons-collections/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.27.1 - https://commons.apache.org/proper/commons-compress/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.11.0 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.10.0 - https://commons.apache.org/proper/commons-csv/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.12.0 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.14.0 - https://commons.apache.org/proper/commons-csv/)
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.13.0 - https://commons.apache.org/proper/commons-dbcp/)
* Apache Commons Digester (org.apache.commons:commons-digester3:3.2 - http://commons.apache.org/digester/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.4.0 - https://commons.apache.org/proper/commons-exec/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.17.0 - https://commons.apache.org/proper/commons-lang/)
* Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
* Apache Commons Pool (org.apache.commons:commons-pool2:2.12.0 - https://commons.apache.org/proper/commons-pool/)
* Apache Commons Text (org.apache.commons:commons-text:1.13.0 - https://commons.apache.org/proper/commons-text)
* Apache Commons Pool (org.apache.commons:commons-pool2:2.12.1 - https://commons.apache.org/proper/commons-pool/)
* Apache Commons Text (org.apache.commons:commons-text:1.13.1 - https://commons.apache.org/proper/commons-text)
* Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
* Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
* Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)
@@ -214,7 +223,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Apache HttpComponents Core HTTP/1.1 (org.apache.httpcomponents.core5:httpcore5:5.1.3 - https://hc.apache.org/httpcomponents-core-5.1.x/5.1.3/httpcore5/)
* Apache HttpComponents Core HTTP/2 (org.apache.httpcomponents.core5:httpcore5-h2:5.1.3 - https://hc.apache.org/httpcomponents-core-5.1.x/5.1.3/httpcore5-h2/)
* Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.12 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.11 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.12 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.13.0 - http://jena.apache.org/apache-jena-libs/)
* Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.13.0 - http://jena.apache.org/jena-arq/)
* Apache Jena - Core (org.apache.jena:jena-core:2.13.0 - http://jena.apache.org/jena-core/)
@@ -224,9 +233,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util)
* Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1)
* Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix)
* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.24.3 - https://logging.apache.org/log4j/2.x/log4j/log4j-1.2-api/)
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.24.3 - https://logging.apache.org/log4j/2.x/log4j/log4j-api/)
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.24.3 - https://logging.apache.org/log4j/2.x/log4j/log4j-core/)
* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.25.1 - https://logging.apache.org/log4j/2.x/)
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.25.1 - https://logging.apache.org/log4j/2.x/)
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.25.1 - https://logging.apache.org/log4j/2.x/)
* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-jul/)
* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
@@ -254,45 +263,45 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.4 - https://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.33 - http://pdfbox.apache.org/)
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.34 - http://pdfbox.apache.org/)
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.33 - https://www.apache.org/pdfbox-parent/pdfbox/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.31 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.31 - https://www.apache.org/pdfbox-parent/xmpbox/)
* Apache POI - Common (org.apache.poi:poi:5.2.5 - https://poi.apache.org/)
* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.5 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-lite:5.2.5 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:5.2.5 - https://poi.apache.org/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.34 - https://www.apache.org/pdfbox-parent/pdfbox/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.34 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.34 - https://www.apache.org/pdfbox-parent/xmpbox/)
* Apache POI - Common (org.apache.poi:poi:5.4.1 - https://poi.apache.org/)
* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.4.1 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-lite:5.4.1 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:5.4.1 - https://poi.apache.org/)
* Apache Solr Core (org.apache.solr:solr-core:8.11.4 - https://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.4 - https://lucene.apache.org/solr-parent/solr-solrj)
* Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl)
* Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec)
* Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org)
* Apache Tika core (org.apache.tika:tika-core:2.9.2 - https://tika.apache.org/)
* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.9.2 - https://tika.apache.org/tika-parser-apple-module/)
* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.9.2 - https://tika.apache.org/tika-parser-audiovideo-module/)
* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.9.2 - https://tika.apache.org/tika-parser-cad-module/)
* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.9.2 - https://tika.apache.org/tika-parser-code-module/)
* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.9.2 - https://tika.apache.org/tika-parser-crypto-module/)
* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.9.2 - https://tika.apache.org/tika-parser-digest-commons/)
* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.9.2 - https://tika.apache.org/tika-parser-font-module/)
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.9.2 - https://tika.apache.org/tika-parser-html-module/)
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.9.2 - https://tika.apache.org/tika-parser-image-module/)
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.9.2 - https://tika.apache.org/tika-parser-mail-commons/)
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.9.2 - https://tika.apache.org/tika-parser-mail-module/)
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.9.2 - https://tika.apache.org/tika-parser-microsoft-module/)
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.9.2 - https://tika.apache.org/tika-parser-miscoffice-module/)
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.9.2 - https://tika.apache.org/tika-parser-news-module/)
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.9.2 - https://tika.apache.org/tika-parser-ocr-module/)
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.9.2 - https://tika.apache.org/tika-parser-pdf-module/)
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.9.2 - https://tika.apache.org/tika-parser-pkg-module/)
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.9.2 - https://tika.apache.org/tika-parser-text-module/)
* Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.9.2 - https://tika.apache.org/tika-parser-webarchive-module/)
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.9.2 - https://tika.apache.org/tika-parser-xml-module/)
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.9.2 - https://tika.apache.org/tika-parser-xmp-commons/)
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.9.2 - https://tika.apache.org/tika-parser-zip-commons/)
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.9.2 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
* Apache Tika core (org.apache.tika:tika-core:2.9.4 - https://tika.apache.org/)
* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.9.4 - https://tika.apache.org/tika-parser-apple-module/)
* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.9.4 - https://tika.apache.org/tika-parser-audiovideo-module/)
* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.9.4 - https://tika.apache.org/tika-parser-cad-module/)
* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.9.4 - https://tika.apache.org/tika-parser-code-module/)
* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.9.4 - https://tika.apache.org/tika-parser-crypto-module/)
* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.9.4 - https://tika.apache.org/tika-parser-digest-commons/)
* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.9.4 - https://tika.apache.org/tika-parser-font-module/)
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.9.4 - https://tika.apache.org/tika-parser-html-module/)
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.9.4 - https://tika.apache.org/tika-parser-image-module/)
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.9.4 - https://tika.apache.org/tika-parser-mail-commons/)
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.9.4 - https://tika.apache.org/tika-parser-mail-module/)
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.9.4 - https://tika.apache.org/tika-parser-microsoft-module/)
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.9.4 - https://tika.apache.org/tika-parser-miscoffice-module/)
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.9.4 - https://tika.apache.org/tika-parser-news-module/)
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.9.4 - https://tika.apache.org/tika-parser-ocr-module/)
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.9.4 - https://tika.apache.org/tika-parser-pdf-module/)
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.9.4 - https://tika.apache.org/tika-parser-pkg-module/)
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.9.4 - https://tika.apache.org/tika-parser-text-module/)
* Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.9.4 - https://tika.apache.org/tika-parser-webarchive-module/)
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.9.4 - https://tika.apache.org/tika-parser-xml-module/)
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.9.4 - https://tika.apache.org/tika-parser-xmp-commons/)
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.9.4 - https://tika.apache.org/tika-parser-zip-commons/)
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.9.4 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.83 - https://tomcat.apache.org/)
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.83 - https://tomcat.apache.org/)
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.83 - https://tomcat.apache.org/)
@@ -301,7 +310,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Apache Velocity Tools - Generic tools (org.apache.velocity.tools:velocity-tools-generic:3.1 - https://velocity.apache.org/tools/devel/velocity-tools-generic/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/)
* Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:5.2.0 - https://xmlbeans.apache.org/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:5.3.0 - https://xmlbeans.apache.org/)
* Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper)
* Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute)
* org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.2 - https://github.com/apiguardian-team/apiguardian)
@@ -348,9 +357,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* flyway-core (org.flywaydb:flyway-core:8.5.13 - https://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.2.5.Final - http://hibernate.org/validator/hibernate-validator)
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.2.5.Final - http://hibernate.org/validator/hibernate-validator-cdi)
* org.immutables.value-annotations (org.immutables:value-annotations:2.9.2 - http://immutables.org/value-annotations)
@@ -360,6 +369,11 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex)
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.6.1.Final - http://www.jboss.org)
* JDOM (org.jdom:jdom2:2.0.6.1 - http://www.jdom.org)
* IntelliJ IDEA Annotations (org.jetbrains:annotations:13.0 - http://www.jetbrains.org)
* Kotlin Stdlib (org.jetbrains.kotlin:kotlin-stdlib:1.8.21 - https://kotlinlang.org/)
* Kotlin Stdlib Common (org.jetbrains.kotlin:kotlin-stdlib-common:1.8.21 - https://kotlinlang.org/)
* Kotlin Stdlib Jdk7 (org.jetbrains.kotlin:kotlin-stdlib-jdk7:1.8.21 - https://kotlinlang.org/)
* Kotlin Stdlib Jdk8 (org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.8.21 - https://kotlinlang.org/)
* jtwig-core (org.jtwig:jtwig-core:5.87.0.RELEASE - http://jtwig.org)
* jtwig-reflection (org.jtwig:jtwig-reflection:5.87.0.RELEASE - http://jtwig.org)
* jtwig-spring (org.jtwig:jtwig-spring:5.87.0.RELEASE - http://jtwig.org)
@@ -377,8 +391,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
* Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api)
* jwarc (org.netpreserve:jwarc:0.29.0 - https://github.com/iipc/jwarc)
* jwarc (org.netpreserve:jwarc:0.31.1 - https://github.com/iipc/jwarc)
* Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis)
* org.opentest4j:opentest4j (org.opentest4j:opentest4j:1.3.0 - https://github.com/ota4j-team/opentest4j)
* parboiled-core (org.parboiled:parboiled-core:1.1.7 - http://parboiled.org)
* parboiled-java (org.parboiled:parboiled-java:1.1.7 - http://parboiled.org)
* RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/)
@@ -438,7 +453,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
* snappy-java (org.xerial.snappy:snappy-java:1.1.10.1 - https://github.com/xerial/snappy-java)
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.10.0 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.10.2 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.9.1 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.9.1 - https://www.xmlunit.org/xmlunit-placeholders/)
* SnakeYAML (org.yaml:snakeyaml:1.30 - https://bitbucket.org/snakeyaml/snakeyaml)
@@ -456,15 +471,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.15.0 - https://developers.google.com/protocol-buffers/protobuf-java/)
* JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/)
* jmustache (com.samskivert:jmustache:1.15 - http://github.com/samskivert/jmustache)
* dnsjava (dnsjava:dnsjava:3.6.2 - https://github.com/dnsjava/dnsjava)
* dnsjava (dnsjava:dnsjava:3.6.3 - https://github.com/dnsjava/dnsjava)
* jaxen (jaxen:jaxen:2.0.0 - http://www.cafeconleche.org/jaxen/jaxen)
* ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.5.1-1 - http://www.antlr.org/antlr4-runtime)
* commons-compiler (org.codehaus.janino:commons-compiler:3.1.8 - http://janino-compiler.github.io/commons-compiler/)
* janino (org.codehaus.janino:janino:3.1.8 - http://janino-compiler.github.io/janino/)
* Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api)
* Hamcrest Date (org.exparity:hamcrest-date:2.0.8 - https://github.com/exparity/hamcrest-date)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/)
* Hamcrest Core (org.hamcrest:hamcrest-core:2.2 - http://hamcrest.org/JavaHamcrest/)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
@@ -474,9 +489,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* asm-commons (org.ow2.asm:asm-commons:9.3 - http://asm.ow2.io/)
|
||||
* ASM Tree (org.ow2.asm:asm-tree:5.0.3 - http://asm.objectweb.org/asm-tree/)
|
||||
* ASM Util (org.ow2.asm:asm-util:5.0.3 - http://asm.objectweb.org/asm-util/)
|
||||
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.7.5 - https://jdbc.postgresql.org)
|
||||
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.7.7 - https://jdbc.postgresql.org)
|
||||
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
|
||||
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
|
||||
* XZ for Java (org.tukaani:xz:1.10 - https://tukaani.org/xz/java.html)
|
||||
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
|
||||
|
||||
CC0:
|
||||
@@ -495,7 +511,6 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp)
|
||||
* JavaBeans Activation Framework API jar (javax.activation:javax.activation-api:1.2.0 - http://java.net/all/javax.activation-api/)
|
||||
* javax.annotation API (javax.annotation:javax.annotation-api:1.3.2 - http://jcp.org/en/jsr/detail?id=250)
|
||||
* Expression Language 3.0 API (javax.el:javax.el-api:3.0.0 - http://uel-spec.java.net)
|
||||
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
|
||||
* javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net)
|
||||
* jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api)
|
||||
@@ -506,8 +521,8 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
|
||||
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
|
||||
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec)
|
||||
|
||||
Cordra (Version 2) License Agreement:
|
||||
@@ -528,8 +543,8 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec)
|
||||
* JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:2.3.9 - https://eclipse-ee4j.github.io/jaxb-ri/)
|
||||
* TXW2 Runtime (org.glassfish.jaxb:txw2:2.3.9 - https://eclipse-ee4j.github.io/jaxb-ri/)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
|
||||
* org.locationtech.jts:jts-core (org.locationtech.jts:jts-core:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-core)
|
||||
* org.locationtech.jts.io:jts-io-common (org.locationtech.jts.io:jts-io-common:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-io/jts-io-common)
|
||||
@@ -584,10 +599,13 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
|
||||
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
|
||||
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
|
||||
* JUnit Platform Commons (org.junit.platform:junit-platform-commons:1.11.4 - https://junit.org/junit5/)
|
||||
* JUnit Platform Engine API (org.junit.platform:junit-platform-engine:1.11.4 - https://junit.org/junit5/)
|
||||
* JUnit Vintage Engine (org.junit.vintage:junit-vintage-engine:5.11.4 - https://junit.org/junit5/)
|
||||
* org.locationtech.jts:jts-core (org.locationtech.jts:jts-core:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-core)
|
||||
* org.locationtech.jts.io:jts-io-common (org.locationtech.jts.io:jts-io-common:1.19.0 - https://www.locationtech.org/projects/technology.jts/jts-modules/jts-io/jts-io-common)
|
||||
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
|
||||
@@ -596,11 +614,11 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||

GENERAL PUBLIC LICENSE, version 3 (GPL-3.0):

* juniversalchardet (com.github.albfernandez:juniversalchardet:2.4.0 - https://github.com/albfernandez/juniversalchardet)
* juniversalchardet (com.github.albfernandez:juniversalchardet:2.5.0 - https://github.com/albfernandez/juniversalchardet)

GNU LESSER GENERAL PUBLIC LICENSE, version 3 (LGPL-3.0):

* juniversalchardet (com.github.albfernandez:juniversalchardet:2.4.0 - https://github.com/albfernandez/juniversalchardet)
* juniversalchardet (com.github.albfernandez:juniversalchardet:2.5.0 - https://github.com/albfernandez/juniversalchardet)

GNU Lesser General Public License (LGPL):

@@ -626,9 +644,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
|
||||
* RE2/J (com.google.re2j:re2j:1.2 - http://github.com/google/re2j)
|
||||
|
||||
Handle.Net Public License Agreement (Ver.2):
|
||||
Handle.Net Public License Agreement (Ver.3):
|
||||
|
||||
* Handle Server (net.handle:handle:9.3.1 - https://www.handle.net)
|
||||
* Handle Server (net.handle:handle:9.3.2 - https://www.handle.net)
|
||||
|
||||
ISC License:
|
||||
|
||||
@@ -643,15 +661,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock)
|
||||
* ClassGraph (io.github.classgraph:classgraph:4.8.154 - https://github.com/classgraph/classgraph)
|
||||
* JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple)
|
||||
* Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk18on:1.77 - https://www.bouncycastle.org/java.html)
|
||||
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
|
||||
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
|
||||
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
|
||||
* Bouncy Castle JavaMail S/MIME APIs (org.bouncycastle:bcmail-jdk18on:1.80 - https://www.bouncycastle.org/download/bouncy-castle-java/)
|
||||
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk18on:1.81 - https://www.bouncycastle.org/download/bouncy-castle-java/)
|
||||
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk18on:1.81 - https://www.bouncycastle.org/download/bouncy-castle-java/)
|
||||
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk18on:1.81 - https://www.bouncycastle.org/download/bouncy-castle-java/)
|
||||
* org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec)
|
||||
* Checker Qual (org.checkerframework:checker-qual:3.23.0 - https://checkerframework.org)
|
||||
* Checker Qual (org.checkerframework:checker-qual:3.48.3 - https://checkerframework.org/)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* Checker Qual (org.checkerframework:checker-qual:3.49.3 - https://checkerframework.org/)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito)
|
||||
* mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito)
|
||||
* ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model)
|
||||
@@ -664,27 +682,26 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.7.1 - https://www.webjars.org)
|
||||
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.11 - https://www.webjars.org)
|
||||
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.2 - https://www.webjars.org)
|
||||
* core-js (org.webjars.npm:core-js:3.40.0 - https://www.webjars.org)
|
||||
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.15.1 - https://www.webjars.org)
|
||||
* core-js (org.webjars.npm:core-js:3.42.0 - https://www.webjars.org)
|
||||
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.15.2 - https://www.webjars.org)
|
||||
|
||||
Mozilla Public License:
|
||||
|
||||
* juniversalchardet (com.github.albfernandez:juniversalchardet:2.4.0 - https://github.com/albfernandez/juniversalchardet)
|
||||
* juniversalchardet (com.github.albfernandez:juniversalchardet:2.5.0 - https://github.com/albfernandez/juniversalchardet)
|
||||
* H2 Database Engine (com.h2database:h2:2.3.232 - https://h2database.com)
|
||||
* Saxon-HE (net.sf.saxon:Saxon-HE:9.8.0-14 - http://www.saxonica.com/)
|
||||
* Saxon-HE (net.sf.saxon:Saxon-HE:9.9.1-8 - http://www.saxonica.com/)
|
||||
* Javassist (org.javassist:javassist:3.30.2-GA - https://www.javassist.org/)
|
||||
* Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino)
|
||||
|
||||
Public Domain:
|
||||
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
|
||||
* JSON in Java (org.json:json:20231013 - https://github.com/douglascrockford/JSON-java)
|
||||
* LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/)
|
||||
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
|
||||
* XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html)
|
||||
|
||||
UnRar License:
|
||||
|
||||
@@ -696,10 +713,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
|
||||
W3C license:
|
||||
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
|
||||
jQuery license:
|
||||
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.46 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.47 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
|
@@ -7,4 +7,5 @@
<!-- TODO: We should have these turned on. But, currently there's a known bug with indentation checks
on JMockIt Expectations blocks and similar. See https://github.com/checkstyle/checkstyle/issues/3739 -->
<suppress checks="Indentation" files="src[/\\]test[/\\]java"/>
<suppress checks="Regexp" files="DSpaceHttpClientFactory\.java"/>
</suppressions>

@@ -136,5 +136,22 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
<module name="OneStatementPerLine"/>
<!-- Require that "catch" statements are not empty (must at least contain a comment) -->
<module name="EmptyCatchBlock"/>

<!-- Require use of DSpaceHttpClientFactory.getClient() instead of creating the client directly -->
<module name="Regexp">
    <property name="format" value="HttpClientBuilder\.create\s*\(\s*\)" />
    <property name="message" value="Use DSpaceHttpClientFactory.getClient() instead of HttpClientBuilder.create()" />
    <property name="illegalPattern" value="true"/>
    <property name="ignoreComments" value="true"/>
</module>
<!-- Require use of DSpaceHttpClientFactory.getClient() instead of creating the client directly -->
<module name="Regexp">
    <property name="format" value="HttpClients\.createDefault\s*\(\s*\)" />
    <property name="message" value="Use DSpaceHttpClientFactory.getClient() instead of HttpClients.createDefault()" />
    <property name="illegalPattern" value="true"/>
    <property name="ignoreComments" value="true"/>
</module>


</module>
</module>
|
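For context, a minimal sketch of the calling pattern these two Regexp checks push code toward. The Checkstyle message refers to getClient(); the DSpaceHttpClientFactory introduced later in this diff exposes getInstance() and build(), which is what the sketch uses. The surrounding class name and URL handling are illustrative only.

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.dspace.app.client.DSpaceHttpClientFactory;

public class HttpClientUsageSketch {

    public int fetchStatus(String url) throws Exception {
        // Instead of HttpClientBuilder.create() or HttpClients.createDefault(), obtain the
        // client from the shared factory so the configured proxy and any registered
        // interceptors are applied consistently.
        try (CloseableHttpClient client = DSpaceHttpClientFactory.getInstance().build();
             CloseableHttpResponse response = client.execute(new HttpGet(url))) {
            return response.getStatusLine().getStatusCode();
        }
    }
}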
@@ -12,7 +12,7 @@
|
||||
<parent>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>7.6.3</version>
|
||||
<version>7.6.6-SNAPSHOT</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -99,20 +99,6 @@
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>build-helper-maven-plugin</artifactId>
|
||||
<version>3.6.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>validate</phase>
|
||||
<goals>
|
||||
<goal>maven-version</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>buildnumber-maven-plugin</artifactId>
|
||||
@@ -475,10 +461,6 @@
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-dbcp2</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-fileupload</groupId>
|
||||
<artifactId>commons-fileupload</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>commons-io</groupId>
|
||||
@@ -505,11 +487,6 @@
|
||||
<artifactId>javax.servlet-api</artifactId>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<!-- Only necessary because javax.el.MethodNotFoundException is used in several classes -->
|
||||
<dependency>
|
||||
<groupId>javax.el</groupId>
|
||||
<artifactId>javax.el-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.annotation</groupId>
|
||||
<artifactId>javax.annotation-api</artifactId>
|
||||
@@ -651,7 +628,7 @@
|
||||
<dependency>
|
||||
<groupId>dnsjava</groupId>
|
||||
<artifactId>dnsjava</artifactId>
|
||||
<version>3.6.2</version>
|
||||
<version>3.6.3</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -761,7 +738,7 @@
|
||||
<dependency>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
<artifactId>aws-java-sdk-s3</artifactId>
|
||||
<version>1.12.780</version>
|
||||
<version>1.12.791</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -810,7 +787,7 @@
|
||||
<dependency>
|
||||
<groupId>com.opencsv</groupId>
|
||||
<artifactId>opencsv</artifactId>
|
||||
<version>5.10</version>
|
||||
<version>5.12.0</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Email templating -->
|
||||
@@ -876,6 +853,12 @@
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.squareup.okhttp3</groupId>
|
||||
<artifactId>mockwebserver</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
@@ -887,32 +870,32 @@
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-buffer</artifactId>
|
||||
<version>4.1.117.Final</version>
|
||||
<version>4.2.6.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-transport</artifactId>
|
||||
<version>4.1.117.Final</version>
|
||||
<version>4.2.6.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-transport-native-unix-common</artifactId>
|
||||
<version>4.1.117.Final</version>
|
||||
<version>4.2.6.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-common</artifactId>
|
||||
<version>4.1.117.Final</version>
|
||||
<version>4.2.6.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-handler</artifactId>
|
||||
<version>4.1.117.Final</version>
|
||||
<version>4.2.6.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-codec</artifactId>
|
||||
<version>4.1.117.Final</version>
|
||||
<version>4.2.6.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.velocity</groupId>
|
||||
@@ -922,7 +905,7 @@
|
||||
<dependency>
|
||||
<groupId>org.xmlunit</groupId>
|
||||
<artifactId>xmlunit-core</artifactId>
|
||||
<version>2.10.0</version>
|
||||
<version>2.10.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
@@ -10,7 +10,6 @@ package org.dspace.administer;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.transform.TransformerException;
|
||||
import javax.xml.xpath.XPath;
|
||||
@@ -18,6 +17,7 @@ import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
import org.w3c.dom.NodeList;
|
||||
@@ -49,8 +49,9 @@ public class RegistryImporter {
|
||||
*/
|
||||
public static Document loadXML(String filename)
|
||||
throws IOException, ParserConfigurationException, SAXException {
|
||||
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
|
||||
.newDocumentBuilder();
|
||||
// This XML builder will *not* disable external entities as XML
|
||||
// registries are considered trusted content
|
||||
DocumentBuilder builder = XMLUtils.getTrustedDocumentBuilder();
|
||||
|
||||
Document document = builder.parse(new File(filename));
|
||||
|
||||
|
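XMLUtils.getTrustedDocumentBuilder() itself is not shown in this diff; the following is a rough sketch of the distinction the comment above draws (an assumption about the helper's intent, not the actual DSpace implementation): a "trusted" builder keeps parser defaults for content shipped with DSpace, while a hardened builder for untrusted input would lock the parser down. The class and method names are illustrative.

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

public final class DocumentBuilderSketch {

    private DocumentBuilderSketch() {
    }

    /**
     * Builder for XML the caller already trusts (e.g. the registry files shipped with
     * DSpace): parser defaults are kept, so DOCTYPEs and entities are still processed.
     */
    public static DocumentBuilder trustedDocumentBuilder() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        return factory.newDocumentBuilder();
    }

    /**
     * Hardened counterpart for untrusted input, shown for contrast: secure processing on,
     * DOCTYPE declarations rejected, entity expansion disabled.
     */
    public static DocumentBuilder untrustedDocumentBuilder() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setExpandEntityReferences(false);
        factory.setNamespaceAware(true);
        return factory.newDocumentBuilder();
    }
}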
@@ -13,7 +13,6 @@ import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.transform.TransformerException;
|
||||
import javax.xml.xpath.XPath;
|
||||
@@ -21,7 +20,15 @@ import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
@@ -41,7 +48,7 @@ import org.xml.sax.SAXException;
|
||||
* <P>
|
||||
* <code>RegistryLoader -bitstream bitstream-formats.xml</code>
|
||||
* <P>
|
||||
* <code>RegistryLoader -dc dc-types.xml</code>
|
||||
* <code>RegistryLoader -metadata dc-types.xml</code>
|
||||
*
|
||||
* @author Robert Tansley
|
||||
* @version $Revision$
|
||||
@@ -50,7 +57,7 @@ public class RegistryLoader {
|
||||
/**
|
||||
* log4j category
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
|
||||
|
||||
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
|
||||
.getBitstreamFormatService();
|
||||
@@ -67,50 +74,99 @@ public class RegistryLoader {
|
||||
* @throws Exception if error
|
||||
*/
|
||||
public static void main(String[] argv) throws Exception {
|
||||
String usage = "Usage: " + RegistryLoader.class.getName()
|
||||
+ " (-bitstream | -metadata) registry-file.xml";
|
||||
|
||||
Context context = null;
|
||||
// Set up command-line options and parse arguments
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = createCommandLineOptions();
|
||||
|
||||
try {
|
||||
context = new Context();
|
||||
CommandLine line = parser.parse(options, argv);
|
||||
|
||||
// Check if help option was entered or no options provided
|
||||
if (line.hasOption('h') || line.getOptions().length == 0) {
|
||||
printHelp(options);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Context context = new Context();
|
||||
|
||||
// Can't update registries anonymously, so we need to turn off
|
||||
// authorisation
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
// Work out what we're loading
|
||||
if (argv[0].equalsIgnoreCase("-bitstream")) {
|
||||
RegistryLoader.loadBitstreamFormats(context, argv[1]);
|
||||
} else if (argv[0].equalsIgnoreCase("-metadata")) {
|
||||
// Call MetadataImporter, as it handles Metadata schema updates
|
||||
MetadataImporter.loadRegistry(argv[1], true);
|
||||
} else {
|
||||
System.err.println(usage);
|
||||
try {
|
||||
// Work out what we're loading
|
||||
if (line.hasOption('b')) {
|
||||
String filename = line.getOptionValue('b');
|
||||
if (StringUtils.isEmpty(filename)) {
|
||||
System.err.println("No file path provided for bitstream format registry");
|
||||
printHelp(options);
|
||||
System.exit(1);
|
||||
}
|
||||
RegistryLoader.loadBitstreamFormats(context, filename);
|
||||
} else if (line.hasOption('m')) {
|
||||
String filename = line.getOptionValue('m');
|
||||
if (StringUtils.isEmpty(filename)) {
|
||||
System.err.println("No file path provided for metadata registry");
|
||||
printHelp(options);
|
||||
System.exit(1);
|
||||
}
|
||||
// Call MetadataImporter, as it handles Metadata schema updates
|
||||
MetadataImporter.loadRegistry(filename, true);
|
||||
} else {
|
||||
System.err.println("No registry type specified");
|
||||
printHelp(options);
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
// Commit changes and close Context
|
||||
context.complete();
|
||||
System.exit(0);
|
||||
} catch (Exception e) {
|
||||
log.fatal(LogHelper.getHeader(context, "error_loading_registries", ""), e);
|
||||
System.err.println("Error: \n - " + e.getMessage());
|
||||
System.exit(1);
|
||||
} finally {
|
||||
// Clean up our context, if it still exists & it was never completed
|
||||
if (context != null && context.isValid()) {
|
||||
context.abort();
|
||||
}
|
||||
}
|
||||
|
||||
// Commit changes and close Context
|
||||
context.complete();
|
||||
|
||||
System.exit(0);
|
||||
} catch (ArrayIndexOutOfBoundsException ae) {
|
||||
System.err.println(usage);
|
||||
|
||||
} catch (ParseException e) {
|
||||
System.err.println("Error parsing command-line arguments: " + e.getMessage());
|
||||
printHelp(options);
|
||||
System.exit(1);
|
||||
} catch (Exception e) {
|
||||
log.fatal(LogHelper.getHeader(context, "error_loading_registries",
|
||||
""), e);
|
||||
|
||||
System.err.println("Error: \n - " + e.getMessage());
|
||||
System.exit(1);
|
||||
} finally {
|
||||
// Clean up our context, if it still exists & it was never completed
|
||||
if (context != null && context.isValid()) {
|
||||
context.abort();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the command-line options
|
||||
* @return the command-line options
|
||||
*/
|
||||
private static Options createCommandLineOptions() {
|
||||
Options options = new Options();
|
||||
|
||||
options.addOption("b", "bitstream", true, "load bitstream format registry from specified file");
|
||||
options.addOption("m", "metadata", true, "load metadata registry from specified file");
|
||||
options.addOption("h", "help", false, "print this help message");
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Print the help message
|
||||
* @param options the command-line options
|
||||
*/
|
||||
private static void printHelp(Options options) {
|
||||
HelpFormatter formatter = new HelpFormatter();
|
||||
formatter.printHelp("RegistryLoader",
|
||||
"Load bitstream format or metadata registries into the database\n",
|
||||
options,
|
||||
"\nExamples:\n" +
|
||||
" RegistryLoader -b bitstream-formats.xml\n" +
|
||||
" RegistryLoader -m dc-types.xml",
|
||||
true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load Bitstream Format metadata
|
||||
*
|
||||
@@ -210,8 +266,9 @@ public class RegistryLoader {
|
||||
*/
|
||||
private static Document loadXML(String filename) throws IOException,
|
||||
ParserConfigurationException, SAXException {
|
||||
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
|
||||
.newDocumentBuilder();
|
||||
// This XML builder will *not* disable external entities as XML
|
||||
// registries are considered trusted content
|
||||
DocumentBuilder builder = XMLUtils.getTrustedDocumentBuilder();
|
||||
|
||||
return builder.parse(new File(filename));
|
||||
}
|
||||
@@ -221,7 +278,7 @@ public class RegistryLoader {
|
||||
* contains:
|
||||
* <P>
|
||||
* <code>
|
||||
* <foo><mimetype>application/pdf</mimetype></foo>
|
||||
* <foo><mimetype>application/pdf</mimetype></foo>
|
||||
* </code>
|
||||
* passing this the <code>foo</code> node and <code>mimetype</code> will
|
||||
* return <code>application/pdf</code>.
|
||||
@@ -262,10 +319,10 @@ public class RegistryLoader {
|
||||
* document contains:
|
||||
* <P>
|
||||
* <code>
|
||||
* <foo>
|
||||
* <bar>val1</bar>
|
||||
* <bar>val2</bar>
|
||||
* </foo>
|
||||
* <foo>
|
||||
* <bar>val1</bar>
|
||||
* <bar>val2</bar>
|
||||
* </foo>
|
||||
* </code>
|
||||
* passing this the <code>foo</code> node and <code>bar</code> will
|
||||
* return <code>val1</code> and <code>val2</code>.
|
||||
|
@@ -27,7 +27,6 @@ import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.transform.TransformerException;
|
||||
import javax.xml.xpath.XPath;
|
||||
@@ -43,6 +42,7 @@ import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
@@ -613,8 +613,8 @@ public class StructBuilder {
|
||||
*/
|
||||
private static org.w3c.dom.Document loadXML(InputStream input)
|
||||
throws IOException, ParserConfigurationException, SAXException {
|
||||
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
|
||||
.newDocumentBuilder();
|
||||
// This builder factory does not disable external DTD, entities, etc.
|
||||
DocumentBuilder builder = XMLUtils.getTrustedDocumentBuilder();
|
||||
|
||||
org.w3c.dom.Document document = builder.parse(input);
|
||||
|
||||
|
@@ -18,6 +18,7 @@ import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.text.DateFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
@@ -154,7 +155,7 @@ public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptCon
|
||||
}
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.setTimeZone(TimeZone.getTimeZone("UTC"));
|
||||
mapper.setTimeZone(TimeZone.getTimeZone(ZoneOffset.UTC));
|
||||
BulkAccessControlInput accessControl;
|
||||
context = new Context(Context.Mode.BATCH_EDIT);
|
||||
setEPerson(context);
|
||||
@@ -416,7 +417,7 @@ public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptCon
|
||||
discoverQuery.setQuery(query);
|
||||
discoverQuery.setStart(start);
|
||||
discoverQuery.setMaxResults(limit);
|
||||
|
||||
discoverQuery.setSortField("search.resourceid", DiscoverQuery.SORT_ORDER.asc);
|
||||
return discoverQuery;
|
||||
}
|
||||
|
||||
|
@@ -14,6 +14,8 @@ import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.DefaultParser.Builder;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
|
||||
@@ -143,7 +145,7 @@ public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScr
|
||||
|
||||
Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
|
||||
handler.logDebug("creating dspacecsv");
|
||||
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true);
|
||||
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true, handler);
|
||||
handler.logDebug("writing to file " + getFileNameOrExportFile());
|
||||
handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
|
||||
context.restoreAuthSystemState();
|
||||
@@ -167,4 +169,14 @@ public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScr
|
||||
}
|
||||
return scopeObj;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StepResult parse(String[] args) throws ParseException {
|
||||
commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args);
|
||||
Builder builder = new DefaultParser().builder();
|
||||
builder.setStripLeadingAndTrailingQuotes(false);
|
||||
commandLine = builder.build().parse(getScriptConfiguration().getOptions(), args);
|
||||
setup();
|
||||
return StepResult.Continue;
|
||||
}
|
||||
}
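For reference, a small standalone illustration of what setStripLeadingAndTrailingQuotes(false) changes in commons-cli parsing; the option name and value below are invented for the example.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;

public class QuoteParsingSketch {

    public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addOption("q", "query", true, "search query");

        // With quote stripping left at its default, surrounding quotes in an option value
        // may be removed; disabling it keeps the value exactly as supplied, which matters
        // for queries that legitimately contain quotes.
        CommandLine line = DefaultParser.builder()
                .setStripLeadingAndTrailingQuotes(false)
                .build()
                .parse(options, new String[] {"-q", "\"dc.title:open access\""});

        System.out.println(line.getOptionValue('q')); // -> "dc.title:open access" (quotes preserved)
    }
}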
|
||||
|
@@ -494,7 +494,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
|
||||
// Check it has an owning collection
|
||||
List<String> collections = line.get("collection");
|
||||
if (collections == null) {
|
||||
if (collections == null || collections.isEmpty()) {
|
||||
throw new MetadataImportException(
|
||||
"New items must have a 'collection' assigned in the form of a handle");
|
||||
}
|
||||
|
@@ -0,0 +1,152 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.client;

import static org.apache.commons.collections4.ListUtils.emptyIfNull;

import java.util.List;

import org.apache.http.HttpRequestInterceptor;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Factory of {@link HttpClient} with common configurations.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class DSpaceHttpClientFactory {

    @Autowired
    private ConfigurationService configurationService;

    @Autowired
    private DSpaceProxyRoutePlanner proxyRoutePlanner;

    @Autowired(required = false)
    private List<HttpRequestInterceptor> requestInterceptors;

    @Autowired(required = false)
    private List<HttpResponseInterceptor> responseInterceptors;

    /**
     * Get an instance of {@link DSpaceHttpClientFactory} from the Spring context.
     * @return the bean instance
     */
    public static DSpaceHttpClientFactory getInstance() {
        return new DSpace().getSingletonService(DSpaceHttpClientFactory.class);
    }

    /**
     * Build an instance of {@link HttpClient} setting the proxy if configured.
     *
     * @return the client
     */
    public CloseableHttpClient build() {
        return build(HttpClientBuilder.create(), true);
    }

    /**
     * Return a builder for an instance of {@link HttpClient}, pre-setting the proxy if configured.
     *
     * @param setProxy whether to apply the configured proxy route planner
     * @return the client builder
     */
    public HttpClientBuilder builder(boolean setProxy) {
        HttpClientBuilder clientBuilder = HttpClientBuilder.create();
        if (setProxy) {
            clientBuilder.setRoutePlanner(proxyRoutePlanner);
        }
        getRequestInterceptors().forEach(clientBuilder::addInterceptorLast);
        getResponseInterceptors().forEach(clientBuilder::addInterceptorLast);
        return clientBuilder;
    }

    /**
     * Build an instance of {@link HttpClient} without setting the proxy, even if
     * configured.
     *
     * @return the client
     */
    public CloseableHttpClient buildWithoutProxy() {
        return build(HttpClientBuilder.create(), false);
    }

    /**
     * Build an instance of {@link HttpClient} setting the proxy if configured,
     * disabling automatic retries and setting the maximum total connections.
     *
     * @param maxConnTotal the maximum total connection value
     * @return the client
     */
    public CloseableHttpClient buildWithoutAutomaticRetries(int maxConnTotal) {
        HttpClientBuilder clientBuilder = HttpClientBuilder.create()
            .disableAutomaticRetries()
            .setMaxConnTotal(maxConnTotal);
        return build(clientBuilder, true);
    }

    /**
     * Build an instance of {@link HttpClient} setting the proxy if configured, with
     * the given request configuration.
     * @param requestConfig the request configuration
     * @return the client
     */
    public CloseableHttpClient buildWithRequestConfig(RequestConfig requestConfig) {
        HttpClientBuilder httpClientBuilder = HttpClientBuilder.create()
            .setDefaultRequestConfig(requestConfig);
        return build(httpClientBuilder, true);
    }

    private CloseableHttpClient build(HttpClientBuilder clientBuilder, boolean setProxy) {
        if (setProxy) {
            clientBuilder.setRoutePlanner(proxyRoutePlanner);
        }
        getRequestInterceptors().forEach(clientBuilder::addInterceptorLast);
        getResponseInterceptors().forEach(clientBuilder::addInterceptorLast);
        return clientBuilder.build();
    }

    public ConfigurationService getConfigurationService() {
        return configurationService;
    }

    public void setConfigurationService(ConfigurationService configurationService) {
        this.configurationService = configurationService;
    }

    public List<HttpRequestInterceptor> getRequestInterceptors() {
        return emptyIfNull(requestInterceptors);
    }

    public void setRequestInterceptors(List<HttpRequestInterceptor> requestInterceptors) {
        this.requestInterceptors = requestInterceptors;
    }

    public List<HttpResponseInterceptor> getResponseInterceptors() {
        return emptyIfNull(responseInterceptors);
    }

    public void setResponseInterceptors(List<HttpResponseInterceptor> responseInterceptors) {
        this.responseInterceptors = responseInterceptors;
    }

    public DSpaceProxyRoutePlanner getProxyRoutePlanner() {
        return proxyRoutePlanner;
    }

    public void setProxyRoutePlanner(DSpaceProxyRoutePlanner proxyRoutePlanner) {
        this.proxyRoutePlanner = proxyRoutePlanner;
    }
}
|
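DSpaceHttpClientFactory is normally wired by Spring and looked up through getInstance(); the following is a minimal sketch of equivalent manual wiring, e.g. for a test, assuming DSpaceServicesFactory.getInstance().getConfigurationService() as the way to obtain the configuration service. The surrounding class name and timeout value are illustrative only.

import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.CloseableHttpClient;
import org.dspace.app.client.DSpaceHttpClientFactory;
import org.dspace.app.client.DSpaceProxyRoutePlanner;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

public class HttpClientFactoryWiringSketch {

    public CloseableHttpClient buildClient() {
        // Resolve the configuration service the same way other DSpace code does.
        ConfigurationService configurationService =
            DSpaceServicesFactory.getInstance().getConfigurationService();

        // Mirror what the Spring context would inject.
        DSpaceHttpClientFactory factory = new DSpaceHttpClientFactory();
        factory.setConfigurationService(configurationService);
        factory.setProxyRoutePlanner(new DSpaceProxyRoutePlanner(configurationService));

        // Any of the build* variants can then be used; here, a 5 second connect timeout.
        RequestConfig requestConfig = RequestConfig.custom()
            .setConnectTimeout(5000)
            .build();
        return factory.buildWithRequestConfig(requestConfig);
    }
}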
@@ -0,0 +1,73 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.client;

import java.util.Arrays;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.impl.conn.DefaultRoutePlanner;
import org.apache.http.protocol.HttpContext;
import org.dspace.services.ConfigurationService;

/**
 * Extension of {@link DefaultRoutePlanner} that determines the proxy based on
 * the configuration service, skipping any hosts configured to be ignored.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class DSpaceProxyRoutePlanner extends DefaultRoutePlanner {

    private ConfigurationService configurationService;

    public DSpaceProxyRoutePlanner(ConfigurationService configurationService) {
        super(null);
        this.configurationService = configurationService;
    }

    @Override
    protected HttpHost determineProxy(HttpHost target, HttpRequest request, HttpContext context) throws HttpException {
        if (isTargetHostConfiguredToBeIgnored(target)) {
            return null;
        }
        String proxyHost = configurationService.getProperty("http.proxy.host");
        String proxyPort = configurationService.getProperty("http.proxy.port");
        if (StringUtils.isAnyBlank(proxyHost, proxyPort)) {
            return null;
        }
        try {
            return new HttpHost(proxyHost, Integer.parseInt(proxyPort), "http");
        } catch (NumberFormatException e) {
            throw new RuntimeException("Invalid proxy port configuration: " + proxyPort);
        }
    }

    private boolean isTargetHostConfiguredToBeIgnored(HttpHost target) {
        String[] hostsToIgnore = configurationService.getArrayProperty("http.proxy.hosts-to-ignore");
        if (ArrayUtils.isEmpty(hostsToIgnore)) {
            return false;
        }
        return Arrays.stream(hostsToIgnore)
            .anyMatch(host -> matchesHost(host, target.getHostName()));
    }

    private boolean matchesHost(String hostPattern, String hostName) {
        if (hostName.equals(hostPattern)) {
            return true;
        } else if (hostPattern.startsWith("*")) {
            return hostName.endsWith(StringUtils.removeStart(hostPattern, "*"));
        } else if (hostPattern.endsWith("*")) {
            return hostName.startsWith(StringUtils.removeEnd(hostPattern, "*"));
        }
        return false;
    }
}
|
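The route planner is driven entirely by standard DSpace configuration properties read through getProperty()/getArrayProperty(); a short hypothetical trace of the wildcard matching above follows (all hostnames and values are invented for the example).

// Assumed configuration (e.g. in local.cfg; every value below is an example only):
//
//   http.proxy.host = proxy.example.org
//   http.proxy.port = 3128
//   http.proxy.hosts-to-ignore = localhost, *.internal.example.org, handle.*
//
// With those values, determineProxy() would behave as follows:
//
//   target "api.crossref.org"          -> routed via proxy.example.org:3128 (no ignore pattern matches)
//   target "localhost"                 -> direct connection (exact match in hosts-to-ignore)
//   target "solr.internal.example.org" -> direct connection (matches "*.internal.example.org")
//   target "handle.net"                -> direct connection (matches "handle.*")
//
// If either http.proxy.host or http.proxy.port is blank, no proxy is used at all.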
@@ -352,7 +352,7 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
|
||||
/**
|
||||
* Create the 'collections' file. List handles of all Collections which
|
||||
* contain this Item. The "owning" Collection is listed first.
|
||||
* contain this Item. The "owning" Collection is listed first.
|
||||
*
|
||||
* @param item list collections holding this Item.
|
||||
* @param destDir write the file here.
|
||||
@@ -363,12 +363,14 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
File outFile = new File(destDir, "collections");
|
||||
if (outFile.createNewFile()) {
|
||||
try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
|
||||
String ownerHandle = item.getOwningCollection().getHandle();
|
||||
out.println(ownerHandle);
|
||||
Collection owningCollection = item.getOwningCollection();
|
||||
// The owning collection is null for workspace and workflow items
|
||||
if (owningCollection != null) {
|
||||
out.println(owningCollection.getHandle());
|
||||
}
|
||||
for (Collection collection : item.getCollections()) {
|
||||
String collectionHandle = collection.getHandle();
|
||||
if (!collectionHandle.equals(ownerHandle)) {
|
||||
out.println(collectionHandle);
|
||||
if (!collection.equals(owningCollection)) {
|
||||
out.println(collection.getHandle());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -22,6 +22,7 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.tika.Tika;
|
||||
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
|
||||
@@ -333,33 +334,38 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
|
||||
Optional<InputStream> optionalFileStream = Optional.empty();
|
||||
Optional<InputStream> validationFileStream = Optional.empty();
|
||||
if (!remoteUrl) {
|
||||
// manage zip via upload
|
||||
optionalFileStream = handler.getFileStream(context, zipfilename);
|
||||
validationFileStream = handler.getFileStream(context, zipfilename);
|
||||
} else {
|
||||
// manage zip via remote url
|
||||
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
}
|
||||
|
||||
if (validationFileStream.isPresent()) {
|
||||
// validate zip file
|
||||
if (validationFileStream.isPresent()) {
|
||||
validateZip(validationFileStream.get());
|
||||
try {
|
||||
if (!remoteUrl) {
|
||||
// manage zip via upload
|
||||
optionalFileStream = handler.getFileStream(context, zipfilename);
|
||||
validationFileStream = handler.getFileStream(context, zipfilename);
|
||||
} else {
|
||||
// manage zip via remote url
|
||||
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
}
|
||||
|
||||
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||
}
|
||||
if (validationFileStream.isPresent()) {
|
||||
// validate zip file
|
||||
if (validationFileStream.isPresent()) {
|
||||
validateZip(validationFileStream.get());
|
||||
}
|
||||
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||
}
|
||||
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||
} finally {
|
||||
optionalFileStream.ifPresent(IOUtils::closeQuietly);
|
||||
validationFileStream.ifPresent(IOUtils::closeQuietly);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -17,6 +17,7 @@ import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.app.itemimport.service.ItemImportService;
|
||||
import org.dspace.content.Collection;
|
||||
@@ -111,7 +112,11 @@ public class ItemImportCLI extends ItemImport {
|
||||
|
||||
// validate zip file
|
||||
InputStream validationFileStream = new FileInputStream(myZipFile);
|
||||
validateZip(validationFileStream);
|
||||
try {
|
||||
validateZip(validationFileStream);
|
||||
} finally {
|
||||
IOUtils.closeQuietly(validationFileStream);
|
||||
}
|
||||
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
@@ -120,22 +125,28 @@ public class ItemImportCLI extends ItemImport {
|
||||
} else {
|
||||
// manage zip via remote url
|
||||
Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
if (optionalFileStream.isPresent()) {
|
||||
// validate zip file via url
|
||||
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
if (validationFileStream.isPresent()) {
|
||||
validateZip(validationFileStream.get());
|
||||
}
|
||||
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
try {
|
||||
if (optionalFileStream.isPresent()) {
|
||||
// validate zip file via url
|
||||
|
||||
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||
if (validationFileStream.isPresent()) {
|
||||
validateZip(validationFileStream.get());
|
||||
}
|
||||
|
||||
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||
}
|
||||
} finally {
|
||||
optionalFileStream.ifPresent(IOUtils::closeQuietly);
|
||||
validationFileStream.ifPresent(IOUtils::closeQuietly);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -29,6 +29,7 @@ import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.io.PrintWriter;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Path;
|
||||
import java.sql.SQLException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
@@ -48,7 +49,6 @@ import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
import javax.mail.MessagingException;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.transform.TransformerException;
|
||||
import javax.xml.xpath.XPath;
|
||||
@@ -67,6 +67,7 @@ import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.itemimport.service.ItemImportService;
|
||||
import org.dspace.app.util.LocalSchemaFilenameFilter;
|
||||
import org.dspace.app.util.RelationshipUtils;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
@@ -179,6 +180,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
@Autowired(required = true)
|
||||
protected MetadataValueService metadataValueService;
|
||||
|
||||
protected DocumentBuilder builder;
|
||||
|
||||
protected String tempWorkDir;
|
||||
|
||||
protected boolean isTest = false;
|
||||
@@ -742,15 +745,22 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
myitem = wi.getItem();
|
||||
}
|
||||
|
||||
// normalize and validate path to make sure itemname doesn't contain path traversal
|
||||
Path itemPath = new File(path + File.separatorChar + itemname + File.separatorChar)
|
||||
.toPath().normalize();
|
||||
if (!itemPath.startsWith(path)) {
|
||||
throw new IOException("Illegal item metadata path: '" + itemPath);
|
||||
}
|
||||
// Normalization chops off the last separator, and we need to put it back
|
||||
String itemPathDir = itemPath.toString() + File.separatorChar;
|
||||
|
||||
// now fill out dublin core for item
|
||||
loadMetadata(c, myitem, path + File.separatorChar + itemname
|
||||
+ File.separatorChar);
|
||||
loadMetadata(c, myitem, itemPathDir);
|
||||
|
||||
// and the bitstreams from the contents file
|
||||
// process contents file, add bitstreams and bundles, return any
|
||||
// non-standard permissions
|
||||
List<String> options = processContentsFile(c, myitem, path
|
||||
+ File.separatorChar + itemname, "contents");
|
||||
List<String> options = processContentsFile(c, myitem, itemPathDir, "contents");
|
||||
|
||||
if (useWorkflow) {
|
||||
// don't process handle file
|
||||
@@ -768,8 +778,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
}
|
||||
} else {
|
||||
// only process handle file if not using workflow system
|
||||
String myhandle = processHandleFile(c, myitem, path
|
||||
+ File.separatorChar + itemname, "handle");
|
||||
String myhandle = processHandleFile(c, myitem, itemPathDir, "handle");
|
||||
|
||||
// put item in system
|
||||
if (!isTest) {
|
||||
@@ -1001,6 +1010,34 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures a file path does not attempt to access files outside the designated parent directory.
|
||||
*
|
||||
* @param parentDir The absolute path to the parent directory that should contain the file
|
||||
* @param fileName The name or path of the file to validate
|
||||
* @throws IOException If an error occurs while resolving canonical paths, or the file path attempts
|
||||
* to access a location outside the parent directory
|
||||
*/
|
||||
private void validateFilePath(String parentDir, String fileName) throws IOException {
|
||||
File parent = new File(parentDir);
|
||||
File file = new File(fileName);
|
||||
|
||||
// If the fileName is not an absolute path, we resolve it against the parentDir
|
||||
if (!file.isAbsolute()) {
|
||||
file = new File(parent, fileName);
|
||||
}
|
||||
|
||||
String parentCanonicalPath = parent.getCanonicalPath();
|
||||
String fileCanonicalPath = file.getCanonicalPath();
|
||||
|
||||
if (!fileCanonicalPath.startsWith(parentCanonicalPath)) {
|
||||
log.error("File path outside of canonical root requested: fileCanonicalPath={} does not begin " +
|
||||
"with parentCanonicalPath={}", fileCanonicalPath, parentCanonicalPath);
|
||||
throw new IOException("Illegal file path '" + fileName + "' encountered. This references a path " +
|
||||
"outside of the import package. Please see the system logs for more details.");
|
||||
}
|
||||
}
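A brief, hypothetical trace of how this canonical-path check behaves for entries read from a contents file (all paths below are invented for the example).

// Assuming an import package unpacked under /tmp/dspace-import/item_000:
//
//   validateFilePath("/tmp/dspace-import/item_000", "document.pdf");
//       -> resolves to /tmp/dspace-import/item_000/document.pdf, inside the parent: accepted
//
//   validateFilePath("/tmp/dspace-import/item_000", "../../../etc/passwd");
//       -> canonicalizes to /etc/passwd, outside the parent: IOException is thrown
//
//   validateFilePath("/tmp/dspace-import/item_000", "/etc/passwd");
//       -> absolute paths are taken as-is, outside the parent: IOException is thrown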
|
||||
|
||||
/**
|
||||
* Read the collections file inside the item directory. If there
|
||||
* is one and it is not empty return a list of collections in
|
||||
@@ -1201,6 +1238,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
sDescription = sDescription.replaceFirst("description:", "");
|
||||
}
|
||||
|
||||
validateFilePath(path, sFilePath);
|
||||
registerBitstream(c, i, iAssetstore, sFilePath, sBundle, sDescription);
|
||||
logInfo("\tRegistering Bitstream: " + sFilePath
|
||||
+ "\tAssetstore: " + iAssetstore
|
||||
@@ -1414,6 +1452,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
return;
|
||||
}
|
||||
|
||||
validateFilePath(path, fileName);
|
||||
String fullpath = path + File.separatorChar + fileName;
|
||||
|
||||
// get an input stream
|
||||
@@ -1888,9 +1927,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
*/
|
||||
protected Document loadXML(String filename) throws IOException,
|
||||
ParserConfigurationException, SAXException {
|
||||
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
|
||||
.newDocumentBuilder();
|
||||
|
||||
DocumentBuilder builder = XMLUtils.getDocumentBuilder();
|
||||
return builder.parse(new File(filename));
|
||||
}
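
The canonical-path comparison in validateFilePath above is the core of the new import hardening. As a rough, self-contained illustration of that pattern (not the DSpace API itself; the directory and file names below are made up), a caller resolves both paths canonically before touching anything on disk:

    import java.io.File;
    import java.io.IOException;

    public class PathContainmentExample {
        public static void main(String[] args) throws IOException {
            File parent = new File("/tmp/import-package");         // hypothetical import directory
            File requested = new File(parent, "../../etc/passwd"); // attempted traversal

            // Canonical paths resolve "..", symlinks and duplicate separators,
            // so a prefix check on them is meaningful.
            String parentCanonical = parent.getCanonicalPath();
            String fileCanonical = requested.getCanonicalPath();

            // Appending the separator is a common extra precaution against
            // sibling directories that merely share a name prefix.
            if (!fileCanonical.startsWith(parentCanonical + File.separator)) {
                throw new IOException("Path escapes the import package: " + fileCanonical);
            }
        }
    }
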
@@ -23,8 +23,6 @@ import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.transform.Transformer;
|
||||
import javax.xml.transform.TransformerConfigurationException;
|
||||
@@ -33,6 +31,7 @@ import javax.xml.transform.TransformerFactory;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.LocalSchemaFilenameFilter;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
@@ -52,7 +51,6 @@ public class ItemArchive {
|
||||
|
||||
public static final String DUBLIN_CORE_XML = "dublin_core.xml";
|
||||
|
||||
protected static DocumentBuilder builder = null;
|
||||
protected Transformer transformer = null;
|
||||
|
||||
protected List<DtoMetadata> dtomList = null;
|
||||
@@ -95,14 +93,14 @@ public class ItemArchive {
|
||||
InputStream is = null;
|
||||
try {
|
||||
is = new FileInputStream(new File(dir, DUBLIN_CORE_XML));
|
||||
itarch.dtomList = MetadataUtilities.loadDublinCore(getDocumentBuilder(), is);
|
||||
itarch.dtomList = MetadataUtilities.loadDublinCore(XMLUtils.getDocumentBuilder(), is);
|
||||
|
||||
//The code to search for local schema files was copied from org.dspace.app.itemimport
|
||||
// .ItemImportServiceImpl.java
|
||||
File file[] = dir.listFiles(new LocalSchemaFilenameFilter());
|
||||
for (int i = 0; i < file.length; i++) {
|
||||
is = new FileInputStream(file[i]);
|
||||
itarch.dtomList.addAll(MetadataUtilities.loadDublinCore(getDocumentBuilder(), is));
|
||||
itarch.dtomList.addAll(MetadataUtilities.loadDublinCore(XMLUtils.getDocumentBuilder(), is));
|
||||
}
|
||||
} finally {
|
||||
if (is != null) {
|
||||
@@ -126,14 +124,6 @@ public class ItemArchive {
|
||||
return itarch;
|
||||
}
|
||||
|
||||
protected static DocumentBuilder getDocumentBuilder()
|
||||
throws ParserConfigurationException {
|
||||
if (builder == null) {
|
||||
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Getter for Transformer
|
||||
*
|
||||
@@ -318,7 +308,7 @@ public class ItemArchive {
|
||||
|
||||
try {
|
||||
out = new FileOutputStream(new File(dir, "dublin_core.xml"));
|
||||
Document doc = MetadataUtilities.writeDublinCore(getDocumentBuilder(), undoDtomList);
|
||||
Document doc = MetadataUtilities.writeDublinCore(XMLUtils.getDocumentBuilder(), undoDtomList);
|
||||
MetadataUtilities.writeDocument(doc, getTransformer(), out);
|
||||
|
||||
// if undo has delete bitstream
|
||||
|
@@ -19,6 +19,7 @@ import java.util.TreeMap;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.XMLUtils;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.DSpaceRunnable.StepResult;

@@ -314,7 +315,7 @@ public class ScriptLauncher {
String config = kernelImpl.getConfigurationService().getProperty("dspace.dir") +
System.getProperty("file.separator") + "config" +
System.getProperty("file.separator") + "launcher.xml";
SAXBuilder saxBuilder = new SAXBuilder();
SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
Document doc = null;
try {
doc = saxBuilder.build(config);

@@ -18,6 +18,7 @@ import java.nio.charset.StandardCharsets;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.poi.util.IOUtils;
|
||||
import org.apache.tika.Tika;
|
||||
import org.apache.tika.exception.TikaException;
|
||||
import org.apache.tika.metadata.Metadata;
|
||||
@@ -37,6 +38,8 @@ import org.xml.sax.SAXException;
|
||||
public class TikaTextExtractionFilter
|
||||
extends MediaFilter {
|
||||
private final static Logger log = LogManager.getLogger();
|
||||
private static final int DEFAULT_MAX_CHARS = 100_000;
|
||||
private static final int DEFAULT_MAX_ARRAY = 100_000_000;
|
||||
|
||||
@Override
|
||||
public String getFilteredName(String oldFilename) {
|
||||
@@ -70,9 +73,12 @@ public class TikaTextExtractionFilter
|
||||
}
|
||||
|
||||
// Not using temporary file. We'll use Tika's default in-memory parsing.
|
||||
// Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting.
|
||||
String extractedText;
|
||||
int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000);
|
||||
// Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting.
|
||||
int maxChars = configurationService.getIntProperty("textextractor.max-chars", DEFAULT_MAX_CHARS);
|
||||
// Get maximum size of structure that Tika will try to buffer.
|
||||
int maxArray = configurationService.getIntProperty("textextractor.max-array", DEFAULT_MAX_ARRAY);
|
||||
IOUtils.setByteArrayMaxOverride(maxArray);
|
||||
try {
|
||||
// Use Tika to extract text from input. Tika will automatically detect the file type.
|
||||
Tika tika = new Tika();
|
||||
@@ -80,13 +86,13 @@ public class TikaTextExtractionFilter
|
||||
extractedText = tika.parseToString(source);
|
||||
} catch (IOException e) {
|
||||
System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString());
|
||||
e.printStackTrace();
|
||||
e.printStackTrace(System.err);
|
||||
log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e);
|
||||
throw e;
|
||||
} catch (OutOfMemoryError oe) {
|
||||
System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " +
|
||||
"You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString());
|
||||
oe.printStackTrace();
|
||||
oe.printStackTrace(System.err);
|
||||
log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " +
|
||||
"You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe);
|
||||
throw oe;
|
||||
@@ -138,7 +144,7 @@ public class TikaTextExtractionFilter
|
||||
@Override
|
||||
public void characters(char[] ch, int start, int length) throws SAXException {
|
||||
try {
|
||||
writer.append(new String(ch), start, length);
|
||||
writer.append(new String(ch, start, length));
|
||||
} catch (IOException e) {
|
||||
String errorMsg = String.format("Could not append to temporary file at %s " +
|
||||
"when performing text extraction",
|
||||
@@ -156,7 +162,7 @@ public class TikaTextExtractionFilter
|
||||
@Override
|
||||
public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
|
||||
try {
|
||||
writer.append(new String(ch), start, length);
|
||||
writer.append(new String(ch, start, length));
|
||||
} catch (IOException e) {
|
||||
String errorMsg = String.format("Could not append to temporary file at %s " +
|
||||
"when performing text extraction",
|
||||
@@ -167,6 +173,10 @@ public class TikaTextExtractionFilter
}
});

ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
int maxArray = configurationService.getIntProperty("textextractor.max-array", DEFAULT_MAX_ARRAY);
IOUtils.setByteArrayMaxOverride(maxArray);

AutoDetectParser parser = new AutoDetectParser();
Metadata metadata = new Metadata();
// parse our source InputStream using the above custom handler
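
For context, the size-limited extraction configured in this filter can be reproduced in isolation. The following is a hedged sketch of the same Tika and POI calls used above (parseToString with a character cap plus the byte-array override); the input file name and the limits are placeholders rather than DSpace configuration values:

    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.apache.poi.util.IOUtils;
    import org.apache.tika.Tika;

    public class TikaExtractExample {
        public static void main(String[] args) throws Exception {
            int maxChars = 100_000;      // mirrors textextractor.max-chars
            int maxArray = 100_000_000;  // mirrors textextractor.max-array

            // Cap the size of byte arrays that POI-based parsers may allocate.
            IOUtils.setByteArrayMaxOverride(maxArray);

            Tika tika = new Tika();
            tika.setMaxStringLength(maxChars); // truncate extracted text at maxChars

            try (InputStream source = new FileInputStream("example.pdf")) { // placeholder input
                String extractedText = tika.parseToString(source);
                System.out.println(extractedText.length() + " characters extracted");
            }
        }
    }
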
@@ -18,6 +18,7 @@ import javax.xml.parsers.ParserConfigurationException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sfx.service.SFXFileReaderService;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.content.DCPersonName;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataValue;
|
||||
@@ -79,9 +80,9 @@ public class SFXFileReaderServiceImpl implements SFXFileReaderService {
|
||||
log.info("Parsing XML file... " + fileName);
|
||||
DocumentBuilder docBuilder;
|
||||
Document doc = null;
|
||||
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
|
||||
docBuilderFactory.setIgnoringElementContentWhitespace(true);
|
||||
try {
|
||||
DocumentBuilderFactory docBuilderFactory = XMLUtils.getDocumentBuilderFactory();
|
||||
docBuilderFactory.setIgnoringElementContentWhitespace(true);
|
||||
docBuilder = docBuilderFactory.newDocumentBuilder();
|
||||
} catch (ParserConfigurationException e) {
|
||||
log.error("Wrong parser configuration: " + e.getMessage());
|
||||
|
@@ -17,15 +17,15 @@ import javax.annotation.PostConstruct;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
@@ -45,8 +45,6 @@ import org.springframework.cache.annotation.Cacheable;
|
||||
*/
|
||||
public class SHERPAService {
|
||||
|
||||
private CloseableHttpClient client = null;
|
||||
|
||||
private int maxNumberOfTries;
|
||||
private long sleepBetweenTimeouts;
|
||||
private int timeout = 5000;
|
||||
@@ -59,19 +57,6 @@ public class SHERPAService {
|
||||
@Autowired
|
||||
ConfigurationService configurationService;
|
||||
|
||||
/**
|
||||
* Create a new HTTP builder with sensible defaults in constructor
|
||||
*/
|
||||
public SHERPAService() {
|
||||
HttpClientBuilder builder = HttpClientBuilder.create();
|
||||
// httpclient 4.3+ doesn't appear to have any sensible defaults any more. Setting conservative defaults as
|
||||
// not to hammer the SHERPA service too much.
|
||||
client = builder
|
||||
.disableAutomaticRetries()
|
||||
.setMaxConnTotal(5)
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Complete initialization of the Bean.
|
||||
*/
|
||||
@@ -132,46 +117,47 @@ public class SHERPAService {
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
try (CloseableHttpClient client = DSpaceHttpClientFactory.getInstance().buildWithoutAutomaticRetries(5)) {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
try (CloseableHttpResponse response = client.execute(method)) {
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse =
|
||||
new SHERPAPublisherResponse(content, SHERPAPublisherResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
log.debug("Non-null SHERPA response received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse =
|
||||
new SHERPAPublisherResponse(content, SHERPAPublisherResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} else {
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
@@ -235,45 +221,46 @@ public class SHERPAService {
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
try (CloseableHttpClient client = DSpaceHttpClientFactory.getInstance().buildWithoutAutomaticRetries(5)) {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
try (CloseableHttpResponse response = client.execute(method)) {
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse = new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
log.debug("Non-null SHERPA response received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse = new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} else {
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
@@ -283,7 +270,7 @@ public class SHERPAService {
String errorMessage = "Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage();
log.error(errorMessage, e);
sherpaResponse = new SHERPAResponse(errorMessage);
} catch (InterruptedException e) {
} catch (InterruptedException e) {
String errorMessage = "Encountered exception while sleeping thread: " + e.getMessage();
log.error(errorMessage, e);
sherpaResponse = new SHERPAResponse(errorMessage);
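
The refactor in this file replaces a long-lived, hand-built HttpClient with a per-request client obtained from DSpaceHttpClientFactory and closed by try-with-resources. Below is a minimal sketch of that resource-management pattern using the plain Apache HttpClient 4.x builder (roughly what the factory call shown in the diff wraps); the URL is a placeholder:

    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.IOUtils;
    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClientBuilder;

    public class ClosingHttpClientExample {
        public static void main(String[] args) throws Exception {
            HttpGet method = new HttpGet("https://example.org/api"); // placeholder endpoint

            // Both the client and the response are AutoCloseable, so connections
            // and pooled resources are released even when an exception is thrown.
            try (CloseableHttpClient client = HttpClientBuilder.create()
                    .disableAutomaticRetries()
                    .setMaxConnTotal(5)
                    .build();
                 CloseableHttpResponse response = client.execute(method)) {
                int statusCode = response.getStatusLine().getStatusCode();
                String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
                System.out.println(statusCode + ": " + body.length() + " bytes");
            }
        }
    }
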
@@ -7,6 +7,8 @@
|
||||
*/
|
||||
package org.dspace.app.sitemap;
|
||||
|
||||
import static org.dspace.discovery.SearchUtils.RESOURCE_TYPE_FIELD;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
@@ -189,7 +191,8 @@ public class GenerateSitemaps {
|
||||
try {
|
||||
DiscoverQuery discoveryQuery = new DiscoverQuery();
|
||||
discoveryQuery.setMaxResults(PAGE_SIZE);
|
||||
discoveryQuery.setQuery("search.resourcetype:Community");
|
||||
discoveryQuery.setQuery("*:*");
|
||||
discoveryQuery.addFilterQueries(RESOURCE_TYPE_FIELD + ":Community");
|
||||
do {
|
||||
discoveryQuery.setStart(offset);
|
||||
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
|
||||
@@ -213,7 +216,8 @@ public class GenerateSitemaps {
|
||||
offset = 0;
|
||||
discoveryQuery = new DiscoverQuery();
|
||||
discoveryQuery.setMaxResults(PAGE_SIZE);
|
||||
discoveryQuery.setQuery("search.resourcetype:Collection");
|
||||
discoveryQuery.setQuery("*:*");
|
||||
discoveryQuery.addFilterQueries(RESOURCE_TYPE_FIELD + ":Collection");
|
||||
do {
|
||||
discoveryQuery.setStart(offset);
|
||||
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
|
||||
@@ -237,7 +241,8 @@ public class GenerateSitemaps {
|
||||
offset = 0;
|
||||
discoveryQuery = new DiscoverQuery();
|
||||
discoveryQuery.setMaxResults(PAGE_SIZE);
|
||||
discoveryQuery.setQuery("search.resourcetype:Item");
|
||||
discoveryQuery.setQuery("*:*");
|
||||
discoveryQuery.addFilterQueries(RESOURCE_TYPE_FIELD + ":Item");
|
||||
discoveryQuery.addSearchField("search.entitytype");
|
||||
do {
|
||||
|
||||
|
@@ -98,7 +98,8 @@ public class SolrDatabaseResyncCli extends DSpaceRunnable<SolrDatabaseResyncCliS

private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
solrQuery.setQuery("*:*");
solrQuery.addFilterQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]";
logDebugAndOut("Date range filter used; " + dateRangeFilter);
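
This change, like the similar ones in GenerateSitemaps above, moves the type restriction out of the main query and into filter queries, which Solr can cache and reuse independently of the q parameter. A hedged SolrJ sketch of the same pattern follows; the field names and values are written out literally for illustration and are not the exact DSpace constants:

    import org.apache.solr.client.solrj.SolrQuery;

    public class FilterQueryExample {
        public static void main(String[] args) {
            SolrQuery solrQuery = new SolrQuery();

            // Match everything, then narrow with cacheable filter queries
            // instead of packing the restrictions into the main query string.
            solrQuery.setQuery("*:*");
            solrQuery.addFilterQuery("database_status:predb");    // illustrative status filter
            solrQuery.addFilterQuery("search.resourcetype:Item"); // illustrative resource type filter
            solrQuery.addFilterQuery("lastIndexed:[* TO NOW]");   // illustrative date range filter

            System.out.println(solrQuery.toQueryString());
        }
    }
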
@@ -15,7 +15,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;

import org.apache.commons.lang3.StringUtils;

@@ -118,15 +117,17 @@ public class DCInputsReader {
formDefns = new HashMap<String, List<List<Map<String, String>>>>();
valuePairs = new HashMap<String, List<String>>();

String uri = "file:" + new File(fileName).getAbsolutePath();
File inputFile = new File(fileName);
String inputFileDir = inputFile.toPath().normalize().getParent().toString();

String uri = "file:" + inputFile.getAbsolutePath();

try {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);

DocumentBuilder db = factory.newDocumentBuilder();
// This document builder will *not* disable external
// entities as they can be useful in managing large forms, but
// it will restrict them to be within the directory that the
// current input form XML file exists (or a sub-directory)
DocumentBuilder db = XMLUtils.getTrustedDocumentBuilder(inputFileDir);
Document doc = db.parse(uri);
doNodes(doc);
checkValues();

@@ -379,7 +380,7 @@ public class DCInputsReader {
}
// sanity check number of fields
if (fields.size() < 1) {
throw new DCInputsReaderException("Form " + formName + "row " + rowIdx + " has no fields");
throw new DCInputsReaderException("Form " + formName + ", row " + rowIdx + " has no fields");
}
}
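
For reference, the directory-restricted parsing that DCInputsReader now relies on can be exercised on its own. This is a sketch under the assumption that the XMLUtils helper introduced later in this changeset is on the classpath; the form file path is a placeholder, and per the helper's javadoc external entities outside the given directory are meant to be rejected:

    import java.io.File;
    import javax.xml.parsers.DocumentBuilder;
    import org.dspace.app.util.XMLUtils;
    import org.w3c.dom.Document;

    public class TrustedFormParseExample {
        public static void main(String[] args) throws Exception {
            File inputFile = new File("/dspace/config/submission-forms.xml"); // placeholder form file
            String inputFileDir = inputFile.toPath().normalize().getParent().toString();

            // Entities are intended to resolve only when they live under inputFileDir.
            DocumentBuilder db = XMLUtils.getTrustedDocumentBuilder(inputFileDir);
            Document doc = db.parse("file:" + inputFile.getAbsolutePath());
            System.out.println(doc.getDocumentElement().getTagName());
        }
    }
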
@@ -11,7 +11,6 @@ import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
@@ -64,20 +63,36 @@ public class InitializeEntities {
|
||||
*/
|
||||
public static void main(String[] argv) throws SQLException, AuthorizeException, ParseException {
|
||||
InitializeEntities initializeEntities = new InitializeEntities();
|
||||
// Set up command-line options and parse arguments
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = createCommandLineOptions();
|
||||
CommandLine line = parser.parse(options,argv);
|
||||
String fileLocation = getFileLocationFromCommandLine(line);
|
||||
// First of all, check if the help option was entered or a required argument is missing
|
||||
checkHelpEntered(options, line);
|
||||
// Get the file location from the command line
|
||||
String fileLocation = getFileLocationFromCommandLine(line);
|
||||
// Run the script
|
||||
initializeEntities.run(fileLocation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the help option was entered or a required argument is missing. If so, print help and exit.
|
||||
* @param options the defined command-line options
|
||||
* @param line the parsed command-line arguments
|
||||
*/
|
||||
private static void checkHelpEntered(Options options, CommandLine line) {
|
||||
if (line.hasOption("h")) {
|
||||
if (line.hasOption("h") || !line.hasOption("f")) {
|
||||
HelpFormatter formatter = new HelpFormatter();
|
||||
formatter.printHelp("Intialize Entities", options);
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the file path from the command-line argument. Exits with exit code 1 if no file argument was entered.
|
||||
* @param line the parsed command-line arguments
|
||||
* @return the file path
|
||||
*/
|
||||
private static String getFileLocationFromCommandLine(CommandLine line) {
|
||||
String query = line.getOptionValue("f");
|
||||
if (StringUtils.isEmpty(query)) {
|
||||
@@ -88,13 +103,25 @@ public class InitializeEntities {
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
* Create the command-line options
* @return the command-line options
*/
protected static Options createCommandLineOptions() {
Options options = new Options();
options.addOption("f", "file", true, "the location for the file containing the xml data");
options.addOption("f", "file", true, "the path to the file containing the " +
"relationship definitions (e.g. ${dspace.dir}/config/entities/relationship-types.xml)");
options.addOption("h", "help", false, "print this message");

return options;
}
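
The option handling added in this file follows the usual commons-cli flow: define the Options, parse the arguments, and print help when -h is given or the required value is missing. A compact, self-contained sketch of that flow is below; the class name and usage string are illustrative, not part of DSpace:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.HelpFormatter;
    import org.apache.commons.cli.Options;

    public class CliSkeleton {
        public static void main(String[] args) throws Exception {
            Options options = new Options();
            options.addOption("f", "file", true, "path to the relationship definitions file");
            options.addOption("h", "help", false, "print this message");

            CommandLine line = new DefaultParser().parse(options, args);

            // Show usage and stop when help is requested or the required option is absent.
            if (line.hasOption("h") || !line.hasOption("f")) {
                new HelpFormatter().printHelp("initialize-entities", options);
                return;
            }

            String fileLocation = line.getOptionValue("f");
            System.out.println("Would load relationship types from " + fileLocation);
        }
    }
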
/**
|
||||
* Run the script for the given file location
|
||||
* @param fileLocation the file location
|
||||
* @throws SQLException If something goes wrong initializing context or inserting relationship types
|
||||
* @throws AuthorizeException If the script user fails to authorize while inserting relationship types
|
||||
*/
|
||||
private void run(String fileLocation) throws SQLException, AuthorizeException {
|
||||
Context context = new Context();
|
||||
context.turnOffAuthorisationSystem();
|
||||
@@ -102,11 +129,18 @@ public class InitializeEntities {
|
||||
context.complete();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the XML file at fileLocation to create relationship types in the database
|
||||
* @param context DSpace context
|
||||
* @param fileLocation the full or relative file path to the relationship types XML
|
||||
* @throws AuthorizeException If the script user fails to authorize while inserting relationship types
|
||||
*/
|
||||
private void parseXMLToRelations(Context context, String fileLocation) throws AuthorizeException {
|
||||
try {
|
||||
File fXmlFile = new File(fileLocation);
|
||||
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
|
||||
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
|
||||
// This XML builder will allow external entities, so the relationship types XML should
|
||||
// be considered trusted by administrators
|
||||
DocumentBuilder dBuilder = XMLUtils.getTrustedDocumentBuilder();
|
||||
Document doc = dBuilder.parse(fXmlFile);
|
||||
|
||||
doc.getDocumentElement().normalize();
|
||||
@@ -158,15 +192,15 @@ public class InitializeEntities {
|
||||
|
||||
for (int j = 0; j < leftCardinalityList.getLength(); j++) {
|
||||
Node node = leftCardinalityList.item(j);
|
||||
leftCardinalityMin = getString(leftCardinalityMin,(Element) node, "min");
|
||||
leftCardinalityMax = getString(leftCardinalityMax,(Element) node, "max");
|
||||
leftCardinalityMin = getCardinalityMinString(leftCardinalityMin,(Element) node, "min");
|
||||
leftCardinalityMax = getCardinalityMinString(leftCardinalityMax,(Element) node, "max");
|
||||
|
||||
}
|
||||
|
||||
for (int j = 0; j < rightCardinalityList.getLength(); j++) {
|
||||
Node node = rightCardinalityList.item(j);
|
||||
rightCardinalityMin = getString(rightCardinalityMin,(Element) node, "min");
|
||||
rightCardinalityMax = getString(rightCardinalityMax,(Element) node, "max");
|
||||
rightCardinalityMin = getCardinalityMinString(rightCardinalityMin,(Element) node, "min");
|
||||
rightCardinalityMax = getCardinalityMinString(rightCardinalityMax,(Element) node, "max");
|
||||
|
||||
}
|
||||
populateRelationshipType(context, leftType, rightType, leftwardType, rightwardType,
|
||||
@@ -182,13 +216,39 @@ public class InitializeEntities {
|
||||
}
|
||||
}
|
||||
|
||||
private String getString(String leftCardinalityMin,Element node, String minOrMax) {
|
||||
/**
|
||||
* Extract the min or max value for the left or right cardinality from the node text content
|
||||
* @param leftCardinalityMin current left cardinality min
|
||||
* @param node node to extract the min or max value from
|
||||
* @param minOrMax element tag name to parse
|
||||
* @return final left cardinality min
|
||||
*/
|
||||
private String getCardinalityMinString(String leftCardinalityMin, Element node, String minOrMax) {
|
||||
if (node.getElementsByTagName(minOrMax).getLength() > 0) {
|
||||
leftCardinalityMin = node.getElementsByTagName(minOrMax).item(0).getTextContent();
|
||||
}
|
||||
return leftCardinalityMin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Populate the relationship type based on values parsed from the XML relationship types configuration
|
||||
*
|
||||
* @param context DSpace context
|
||||
* @param leftType left relationship type (e.g. "Publication").
|
||||
* @param rightType right relationship type (e.g. "Journal").
|
||||
* @param leftwardType leftward relationship type (e.g. "isAuthorOfPublication").
|
||||
* @param rightwardType rightward relationship type (e.g. "isPublicationOfAuthor").
|
||||
* @param leftCardinalityMin left cardinality min
|
||||
* @param leftCardinalityMax left cardinality max
|
||||
* @param rightCardinalityMin right cardinality min
|
||||
* @param rightCardinalityMax right cardinality max
|
||||
* @param copyToLeft copy metadata values to left if right side is deleted
|
||||
* @param copyToRight copy metadata values to right if left side is deleted
|
||||
* @param tilted set a tilted relationship side (left or right) if there are many relationships going one way
|
||||
* to help performance (e.g. authors with 1000s of publications)
|
||||
* @throws SQLException if database error occurs while saving the relationship type
|
||||
* @throws AuthorizeException if authorization error occurs while saving the relationship type
|
||||
*/
|
||||
private void populateRelationshipType(Context context, String leftType, String rightType, String leftwardType,
|
||||
String rightwardType, String leftCardinalityMin, String leftCardinalityMax,
|
||||
String rightCardinalityMin, String rightCardinalityMax,
|
||||
|
@@ -101,6 +101,14 @@ public class OpenSearchServiceImpl implements OpenSearchService {
configurationService.getProperty("websvc.opensearch.uicontext");
}

/**
* Get base search UI URL (websvc.opensearch.max_num_of_items_per_request)
*/
public int getMaxNumOfItemsPerRequest() {
return configurationService.getIntProperty(
"websvc.opensearch.max_num_of_items_per_request", 100);
}

@Override
public String getContentType(String format) {
return "html".equals(format) ? "text/html" :

@@ -16,7 +16,6 @@ import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.FactoryConfigurationError;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
@@ -160,13 +159,10 @@ public class SubmissionConfigReader {
|
||||
String uri = "file:" + new File(fileName).getAbsolutePath();
|
||||
|
||||
try {
|
||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
||||
.newInstance();
|
||||
factory.setValidating(false);
|
||||
factory.setIgnoringComments(true);
|
||||
factory.setIgnoringElementContentWhitespace(true);
|
||||
|
||||
DocumentBuilder db = factory.newDocumentBuilder();
|
||||
// This document builder factory will *not* disable external
|
||||
// entities as they can be useful in managing large forms, but
|
||||
// it will restrict them to the config dir containing submission definitions
|
||||
DocumentBuilder db = XMLUtils.getTrustedDocumentBuilder(configDir);
|
||||
Document doc = db.parse(uri);
|
||||
doNodes(doc);
|
||||
} catch (FactoryConfigurationError fe) {
|
||||
@@ -681,4 +677,4 @@ public class SubmissionConfigReader {
|
||||
}
|
||||
return results;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -13,12 +13,12 @@ import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpHead;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.app.util.dao.WebAppDAO;
|
||||
import org.dspace.app.util.service.WebAppService;
|
||||
import org.dspace.core.Context;
|
||||
@@ -77,8 +77,8 @@ public class WebAppServiceImpl implements WebAppService {
|
||||
for (WebApp app : webApps) {
|
||||
method = new HttpHead(app.getUrl());
|
||||
int status;
|
||||
try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
|
||||
HttpResponse response = client.execute(method);
|
||||
try (CloseableHttpClient client = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
CloseableHttpResponse response = client.execute(method);
|
||||
status = response.getStatusLine().getStatusCode();
|
||||
}
|
||||
if (status != HttpStatus.SC_OK) {
|
||||
|
@@ -7,12 +7,26 @@
|
||||
*/
|
||||
package org.dspace.app.util;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.stream.XMLInputFactory;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.jdom2.input.SAXBuilder;
|
||||
import org.w3c.dom.Element;
|
||||
import org.w3c.dom.NodeList;
|
||||
import org.xml.sax.EntityResolver;
|
||||
import org.xml.sax.InputSource;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
* Simple class to read information from small XML using DOM manipulation
|
||||
@@ -161,4 +175,195 @@ public class XMLUtils {
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize and return a javax DocumentBuilderFactory with NO security
|
||||
* applied. This is intended only for internal, administrative/configuration
|
||||
* use where external entities and other dangerous features are actually
|
||||
* purposefully included.
|
||||
* The method here is tiny, but may be expanded with other features like
|
||||
* whitespace handling, and calling this method name helps to document
|
||||
* the fact that the caller knows it is trusting the XML source / factory.
|
||||
*
|
||||
* @return document builder factory to generate new builders
|
||||
* @throws ParserConfigurationException
|
||||
*/
|
||||
public static DocumentBuilderFactory getTrustedDocumentBuilderFactory()
|
||||
throws ParserConfigurationException {
|
||||
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
|
||||
return factory;
|
||||
}
|
||||
|
||||
/**
* Initialize and return the javax DocumentBuilderFactory with some basic security
* applied to avoid XXE attacks and other unwanted content inclusion
* @return document builder factory to generate new builders
* @throws ParserConfigurationException
*/
public static DocumentBuilderFactory getDocumentBuilderFactory()
throws ParserConfigurationException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
// No DOCTYPE / DTDs
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
// No external general entities
factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
// No external parameter entities
factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
// No external DTDs
factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
// Even if entities somehow get defined, they will not be expanded
factory.setExpandEntityReferences(false);
// Disable "XInclude" markup processing
factory.setXIncludeAware(false);

return factory;
}

/**
|
||||
* Initialize and return a javax DocumentBuilder with less security
|
||||
* applied. This is intended only for internal, administrative/configuration
|
||||
* use where external entities and other dangerous features are actually
|
||||
* purposefully included, but are only allowed from specified paths, e.g.
|
||||
* dspace.dir or some other path specified by the java caller.
|
||||
* The method here is tiny, but may be expanded with other features like
|
||||
* whitespace handling, and calling this method name helps to document
|
||||
* the fact that the caller knows it is trusting the XML source / builder
|
||||
* <p>
|
||||
* If no allowedPaths are passed, then all external entities are rejected
|
||||
*
|
||||
* @return document builder with no security features set
|
||||
* @throws ParserConfigurationException if the builder can not be configured
|
||||
*/
|
||||
public static DocumentBuilder getTrustedDocumentBuilder(String... allowedPaths)
|
||||
throws ParserConfigurationException {
|
||||
DocumentBuilderFactory factory = getTrustedDocumentBuilderFactory();
|
||||
factory.setValidating(false);
|
||||
factory.setIgnoringComments(true);
|
||||
factory.setIgnoringElementContentWhitespace(true);
|
||||
DocumentBuilder builder = factory.newDocumentBuilder();
|
||||
builder.setEntityResolver(new PathRestrictedEntityResolver(allowedPaths));
|
||||
return factory.newDocumentBuilder();
|
||||
}
|
||||
|
||||
/**
* Initialize and return the javax DocumentBuilder with some basic security applied
* to avoid XXE attacks and other unwanted content inclusion
* @return document builder for use in XML parsing
* @throws ParserConfigurationException if the builder can not be configured
*/
public static DocumentBuilder getDocumentBuilder()
throws ParserConfigurationException {
return getDocumentBuilderFactory().newDocumentBuilder();
}
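
Taken together, the factory settings above mean that a document carrying a DOCTYPE or external entity fails fast instead of being expanded. The following sketch parses a deliberately hostile input with the hardened builder; the XML literal is inline purely for illustration:

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;
    import javax.xml.parsers.DocumentBuilder;
    import org.dspace.app.util.XMLUtils;
    import org.w3c.dom.Document;
    import org.xml.sax.SAXParseException;

    public class HardenedParseExample {
        public static void main(String[] args) throws Exception {
            String evil = "<!DOCTYPE foo [<!ENTITY xxe SYSTEM \"file:///etc/passwd\">]><foo>&xxe;</foo>";

            DocumentBuilder builder = XMLUtils.getDocumentBuilder();
            try {
                Document doc = builder.parse(new ByteArrayInputStream(evil.getBytes(StandardCharsets.UTF_8)));
                System.out.println("Parsed: " + doc.getDocumentElement().getTagName());
            } catch (SAXParseException e) {
                // disallow-doctype-decl makes the DOCTYPE itself a fatal parse error
                System.out.println("Rejected as expected: " + e.getMessage());
            }
        }
    }
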
/**
|
||||
* Initialize and return the SAX document builder with some basic security applied
|
||||
* to avoid XXE attacks and other unwanted content inclusion
|
||||
* @return SAX document builder for use in XML parsing
|
||||
*/
|
||||
public static SAXBuilder getSAXBuilder() {
|
||||
return getSAXBuilder(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize and return the SAX document builder with some basic security applied
|
||||
* to avoid XXE attacks and other unwanted content inclusion
|
||||
* @param validate whether to use JDOM XSD validation
|
||||
* @return SAX document builder for use in XML parsing
|
||||
*/
|
||||
public static SAXBuilder getSAXBuilder(boolean validate) {
|
||||
SAXBuilder saxBuilder = new SAXBuilder();
|
||||
if (validate) {
|
||||
saxBuilder.setValidation(true);
|
||||
}
|
||||
// No DOCTYPE / DTDs
|
||||
saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
// No external general entities
|
||||
saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
|
||||
// No external parameter entities
|
||||
saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
|
||||
// No external DTDs
|
||||
saxBuilder.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
|
||||
// Don't expand entities
|
||||
saxBuilder.setExpandEntities(false);
|
||||
|
||||
return saxBuilder;
|
||||
}
|
||||
|
||||
/**
* Initialize and return the Java XML Input Factory with some basic security applied
* to avoid XXE attacks and other unwanted content inclusion
* @return XML input factory for use in XML parsing
*/
public static XMLInputFactory getXMLInputFactory() {
XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);

return xmlInputFactory;
}
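
A matching sketch for the StAX side: with SUPPORT_DTD switched off, the reader below simply streams element events and will not process DTD-based entity tricks. The inline XML is illustrative only:

    import java.io.StringReader;
    import javax.xml.stream.XMLInputFactory;
    import javax.xml.stream.XMLStreamConstants;
    import javax.xml.stream.XMLStreamReader;
    import org.dspace.app.util.XMLUtils;

    public class SecureStaxExample {
        public static void main(String[] args) throws Exception {
            XMLInputFactory factory = XMLUtils.getXMLInputFactory();
            XMLStreamReader reader = factory.createXMLStreamReader(
                    new StringReader("<records><record id=\"1\"/></records>"));

            // Walk the stream and print element names; DTD support is disabled.
            while (reader.hasNext()) {
                if (reader.next() == XMLStreamConstants.START_ELEMENT) {
                    System.out.println(reader.getLocalName());
                }
            }
            reader.close();
        }
    }
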
/**
|
||||
* This entity resolver accepts one or more path strings in its
|
||||
* constructor and throws a SAXException if the entity systemID
|
||||
* is not within the allowed path (or a subdirectory).
|
||||
* If no parameters are passed, then this effectively disallows
|
||||
* any external entity resolution.
|
||||
*/
|
||||
public static class PathRestrictedEntityResolver implements EntityResolver {
|
||||
private final List<String> allowedBasePaths;
|
||||
|
||||
public PathRestrictedEntityResolver(String... allowedBasePaths) {
|
||||
this.allowedBasePaths = Arrays.asList(allowedBasePaths);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputSource resolveEntity(String publicId, String systemId)
|
||||
throws SAXException, IOException {
|
||||
|
||||
if (systemId == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String filePath;
|
||||
if (systemId.startsWith("file://")) {
|
||||
filePath = systemId.substring(7);
|
||||
} else if (systemId.startsWith("file:")) {
|
||||
filePath = systemId.substring(5);
|
||||
} else if (!systemId.contains("://")) {
|
||||
filePath = systemId;
|
||||
} else {
|
||||
throw new SAXException("External resources not allowed: " + systemId +
|
||||
". Only local file paths are permitted.");
|
||||
}
|
||||
|
||||
Path resolvedPath;
|
||||
try {
|
||||
resolvedPath = Paths.get(filePath).toAbsolutePath().normalize();
|
||||
} catch (Exception e) {
|
||||
throw new SAXException("Invalid path: " + systemId, e);
|
||||
}
|
||||
|
||||
boolean isAllowed = false;
|
||||
for (String basePath : allowedBasePaths) {
|
||||
Path allowedPath = Paths.get(basePath).toAbsolutePath().normalize();
|
||||
if (resolvedPath.startsWith(allowedPath)) {
|
||||
isAllowed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!isAllowed) {
|
||||
throw new SAXException("Access denied to path: " + resolvedPath);
|
||||
}
|
||||
|
||||
File file = resolvedPath.toFile();
|
||||
if (!file.exists() || !file.canRead()) {
|
||||
throw new SAXException("File not found or not readable: " + resolvedPath);
|
||||
}
|
||||
|
||||
return new InputSource(new FileInputStream(file));
}
}

}
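
To make the resolver's contract concrete, here is a hedged sketch of using PathRestrictedEntityResolver directly: a systemId under an allowed base directory may resolve, while anything else raises a SAXException. The base directory and the probed systemId are placeholders:

    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.dspace.app.util.XMLUtils;
    import org.xml.sax.InputSource;
    import org.xml.sax.SAXException;

    public class RestrictedResolverExample {
        public static void main(String[] args) throws Exception {
            // Only entities under /dspace/config (a placeholder path) may be resolved.
            XMLUtils.PathRestrictedEntityResolver resolver =
                    new XMLUtils.PathRestrictedEntityResolver("/dspace/config");

            DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
            builder.setEntityResolver(resolver);

            try {
                // A systemId outside the allowed base path is rejected outright.
                InputSource ignored = resolver.resolveEntity(null, "file:///etc/passwd");
                System.out.println("Unexpectedly resolved: " + ignored);
            } catch (SAXException expected) {
                System.out.println("Blocked: " + expected.getMessage());
            }
        }
    }
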
@@ -117,4 +117,10 @@ public interface OpenSearchService {

public DSpaceObject resolveScope(Context context, String scope) throws SQLException;

/**
* Retrieves the maximum number of items that can be included in a single opensearch request.
*
* @return the maximum number of items allowed per request
*/
int getMaxNumOfItemsPerRequest();
}

@@ -22,12 +22,13 @@ import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.NameValuePair;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.entity.UrlEncodedFormEntity;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpUriRequest;
|
||||
import org.apache.http.client.methods.RequestBuilder;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.message.BasicNameValuePair;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.authenticate.oidc.OidcClient;
|
||||
import org.dspace.authenticate.oidc.OidcClientException;
|
||||
import org.dspace.authenticate.oidc.model.OidcTokenResponseDTO;
|
||||
@@ -83,21 +84,17 @@ public class OidcClientImpl implements OidcClient {
|
||||
}
|
||||
|
||||
private <T> T executeAndParseJson(HttpUriRequest httpUriRequest, Class<T> clazz) {
|
||||
|
||||
HttpClient client = HttpClientBuilder.create().build();
|
||||
|
||||
return executeAndReturns(() -> {
|
||||
|
||||
HttpResponse response = client.execute(httpUriRequest);
|
||||
|
||||
if (isNotSuccessfull(response)) {
|
||||
throw new OidcClientException(getStatusCode(response), formatErrorMessage(response));
|
||||
}
|
||||
|
||||
return objectMapper.readValue(getContent(response), clazz);
|
||||
|
||||
});
|
||||
|
||||
try (CloseableHttpClient client = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
return executeAndReturns(() -> {
|
||||
CloseableHttpResponse response = client.execute(httpUriRequest);
|
||||
if (isNotSuccessfull(response)) {
|
||||
throw new OidcClientException(getStatusCode(response), formatErrorMessage(response));
|
||||
}
|
||||
return objectMapper.readValue(getContent(response), clazz);
|
||||
});
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private <T> T executeAndReturns(ThrowingSupplier<T, Exception> supplier) {
|
||||
|
@@ -7,27 +7,22 @@
|
||||
*/
|
||||
package org.dspace.authority.orcid;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authority.AuthorityValue;
|
||||
import org.dspace.authority.SolrAuthorityInterface;
|
||||
import org.dspace.external.OrcidRestConnector;
|
||||
import org.dspace.external.provider.orcid.xml.XMLtoBio;
|
||||
import org.json.JSONObject;
|
||||
import org.dspace.orcid.model.factory.OrcidFactoryUtils;
|
||||
import org.orcid.jaxb.model.v3.release.common.OrcidIdentifier;
|
||||
import org.orcid.jaxb.model.v3.release.record.Person;
|
||||
import org.orcid.jaxb.model.v3.release.search.Result;
|
||||
@@ -50,6 +45,11 @@ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
|
||||
|
||||
private String accessToken;
|
||||
|
||||
/**
|
||||
* Maximum retries to allow for the access token retrieval
|
||||
*/
|
||||
private int maxClientRetries = 3;
|
||||
|
||||
public void setOAUTHUrl(String oAUTHUrl) {
|
||||
OAUTHUrl = oAUTHUrl;
|
||||
}
|
||||
@@ -62,46 +62,32 @@ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
|
||||
this.clientSecret = clientSecret;
|
||||
}
|
||||
|
||||
public String getAccessToken() {
|
||||
return accessToken;
|
||||
}
|
||||
|
||||
public void setAccessToken(String accessToken) {
|
||||
this.accessToken = accessToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the accessToken that is required for all subsequent calls to ORCID
|
||||
*/
|
||||
public void init() {
|
||||
if (StringUtils.isBlank(accessToken)
|
||||
&& StringUtils.isNotBlank(clientSecret)
|
||||
&& StringUtils.isNotBlank(clientId)
|
||||
&& StringUtils.isNotBlank(OAUTHUrl)) {
|
||||
String authenticationParameters = "?client_id=" + clientId +
|
||||
"&client_secret=" + clientSecret +
|
||||
"&scope=/read-public&grant_type=client_credentials";
|
||||
try {
|
||||
HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
|
||||
httpPost.addHeader("Accept", "application/json");
|
||||
httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");
|
||||
// Initialize access token at spring instantiation. If it fails, the access token will be null rather
|
||||
// than causing a fatal Spring startup error
|
||||
initializeAccessToken();
|
||||
}
|
||||
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpResponse getResponse = httpClient.execute(httpPost);
|
||||
|
||||
JSONObject responseObject = null;
|
||||
try (InputStream is = getResponse.getEntity().getContent();
|
||||
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
|
||||
String inputStr;
|
||||
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
|
||||
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
|
||||
try {
|
||||
responseObject = new JSONObject(inputStr);
|
||||
} catch (Exception e) {
|
||||
//Not as valid as I'd hoped, move along
|
||||
responseObject = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (responseObject != null && responseObject.has("access_token")) {
|
||||
accessToken = (String) responseObject.get("access_token");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Error during initialization of the Orcid connector", e);
|
||||
}
|
||||
public void initializeAccessToken() {
|
||||
// If we have reaches max retries or the access token is already set, return immediately
|
||||
if (maxClientRetries <= 0 || org.apache.commons.lang3.StringUtils.isNotBlank(accessToken)) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
accessToken = OrcidFactoryUtils.retrieveAccessToken(clientId, clientSecret, OAUTHUrl).orElse(null);
|
||||
} catch (IOException e) {
|
||||
log.error("Error retrieving ORCID access token, {} retries left", --maxClientRetries);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -116,7 +102,7 @@ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
|
||||
*/
|
||||
@Override
|
||||
public List<AuthorityValue> queryAuthorities(String text, int max) {
|
||||
init();
|
||||
initializeAccessToken();
|
||||
List<Person> bios = queryBio(text, max);
|
||||
List<AuthorityValue> result = new ArrayList<>();
|
||||
for (Person person : bios) {
|
||||
@@ -135,7 +121,7 @@ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
|
||||
*/
|
||||
@Override
|
||||
public AuthorityValue queryAuthorityID(String id) {
|
||||
init();
|
||||
initializeAccessToken();
|
||||
Person person = getBio(id);
|
||||
AuthorityValue valueFromPerson = Orcidv3AuthorityValue.create(person);
|
||||
return valueFromPerson;
|
||||
@@ -151,11 +137,14 @@ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
|
||||
if (!isValid(id)) {
|
||||
return null;
|
||||
}
|
||||
init();
|
||||
if (orcidRestConnector == null) {
|
||||
log.error("ORCID REST connector is null, returning null Person");
|
||||
return null;
|
||||
}
|
||||
initializeAccessToken();
|
||||
InputStream bioDocument = orcidRestConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
|
||||
XMLtoBio converter = new XMLtoBio();
|
||||
Person person = converter.convertSinglePerson(bioDocument);
|
||||
return person;
|
||||
return converter.convertSinglePerson(bioDocument);
|
||||
}
|
||||
|
||||
|
||||
@@ -167,10 +156,16 @@ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
|
||||
* @return List<Person>
|
||||
*/
|
||||
public List<Person> queryBio(String text, int start, int rows) {
|
||||
init();
|
||||
if (rows > 100) {
|
||||
throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
|
||||
}
|
||||
// Check REST connector is initialized
|
||||
if (orcidRestConnector == null) {
|
||||
log.error("ORCID REST connector is not initialized, returning empty list");
|
||||
return Collections.emptyList();
|
||||
}
|
||||
// Check / init access token
|
||||
initializeAccessToken();
|
||||
|
||||
String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
|
||||
log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
|
||||
|
@@ -9,6 +9,7 @@ package org.dspace.authorize;

import static org.dspace.app.util.AuthorizeUtil.canCollectionAdminManageAccounts;
import static org.dspace.app.util.AuthorizeUtil.canCommunityAdminManageAccounts;
import static org.dspace.discovery.SearchUtils.RESOURCE_TYPE_FIELD;

import java.sql.SQLException;
import java.util.ArrayList;
@@ -736,7 +737,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
*/
@Override
public boolean isCommunityAdmin(Context context) throws SQLException {
return performCheck(context, "search.resourcetype:" + IndexableCommunity.TYPE);
return performCheck(context, RESOURCE_TYPE_FIELD + ":" + IndexableCommunity.TYPE);
}

/**
@@ -749,7 +750,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
*/
@Override
public boolean isCollectionAdmin(Context context) throws SQLException {
return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE);
return performCheck(context, RESOURCE_TYPE_FIELD + ":" + IndexableCollection.TYPE);
}

/**
@@ -762,7 +763,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
*/
@Override
public boolean isItemAdmin(Context context) throws SQLException {
return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE);
return performCheck(context, RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
}

/**
@@ -776,8 +777,8 @@ public class AuthorizeServiceImpl implements AuthorizeService {
@Override
public boolean isComColAdmin(Context context) throws SQLException {
return performCheck(context,
"(search.resourcetype:" + IndexableCommunity.TYPE + " OR search.resourcetype:" +
IndexableCollection.TYPE + ")");
"(" + RESOURCE_TYPE_FIELD + ":" + IndexableCommunity.TYPE + " OR " +
RESOURCE_TYPE_FIELD + ":" + IndexableCollection.TYPE + ")");
}

/**
@@ -795,7 +796,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
throws SearchServiceException, SQLException {
List<Community> communities = new ArrayList<>();
query = formatCustomQuery(query);
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
DiscoverResult discoverResult = getDiscoverResult(context, query + RESOURCE_TYPE_FIELD + ":" +
IndexableCommunity.TYPE,
offset, limit, null, null);
for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
@@ -817,9 +818,9 @@ public class AuthorizeServiceImpl implements AuthorizeService {
public long countAdminAuthorizedCommunity(Context context, String query)
throws SearchServiceException, SQLException {
query = formatCustomQuery(query);
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
DiscoverResult discoverResult = getDiscoverResult(context, query + RESOURCE_TYPE_FIELD + ":" +
IndexableCommunity.TYPE,
null, null, null, null);
null, 0, null, null);
return discoverResult.getTotalSearchResults();
}

@@ -842,7 +843,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
}

query = formatCustomQuery(query);
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
DiscoverResult discoverResult = getDiscoverResult(context, query + RESOURCE_TYPE_FIELD + ":" +
IndexableCollection.TYPE,
offset, limit, CollectionService.SOLR_SORT_FIELD, SORT_ORDER.asc);
for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
@@ -864,9 +865,9 @@ public class AuthorizeServiceImpl implements AuthorizeService {
public long countAdminAuthorizedCollection(Context context, String query)
throws SearchServiceException, SQLException {
query = formatCustomQuery(query);
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
DiscoverResult discoverResult = getDiscoverResult(context, query + RESOURCE_TYPE_FIELD + ":" +
IndexableCollection.TYPE,
null, null, null, null);
null, 0, null, null);
return discoverResult.getTotalSearchResults();
}

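The AuthorizeServiceImpl hunks above replace the hard-coded `search.resourcetype` string with the shared `RESOURCE_TYPE_FIELD` constant from `SearchUtils` whenever Discovery is asked whether the current user administers any community, collection, or item. A minimal sketch of that query pattern follows; the class, constructor wiring, and `resourceType` parameter are illustrative, not part of the patch.

```java
import static org.dspace.discovery.SearchUtils.RESOURCE_TYPE_FIELD;

import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;

public class ResourceTypeQueryExample {

    private final SearchService searchService;

    public ResourceTypeQueryExample(SearchService searchService) {
        this.searchService = searchService;
    }

    /**
     * Count indexed objects of one resource type (e.g. IndexableCommunity.TYPE),
     * filtering on RESOURCE_TYPE_FIELD instead of the literal "search.resourcetype".
     */
    public long countByResourceType(Context context, String resourceType) throws SearchServiceException {
        DiscoverQuery query = new DiscoverQuery();
        query.setQuery("*:*");
        // Same filter shape as the patched isCommunityAdmin()/isCollectionAdmin() checks
        query.addFilterQueries(RESOURCE_TYPE_FIELD + ":" + resourceType);
        query.setMaxResults(0); // only the hit count is needed
        DiscoverResult result = searchService.search(context, query);
        return result.getTotalSearchResults();
    }
}
```
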
@@ -19,6 +19,7 @@ import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.dao.ResourcePolicyDAO;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
@@ -51,6 +52,9 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
@Autowired
private GroupService groupService;

@Autowired
private AuthorizeService authorizeService;

protected ResourcePolicyServiceImpl() {
}

@@ -422,6 +426,6 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
} else if (group != null && groupService.isMember(context, eperson, group)) {
isMy = true;
}
return isMy;
return isMy || authorizeService.isAdmin(context, eperson, resourcePolicy.getdSpaceObject());
}
}

@@ -7,6 +7,9 @@
|
||||
*/
|
||||
package org.dspace.browse;
|
||||
|
||||
import static org.dspace.discovery.SearchUtils.RESOURCE_ID_FIELD;
|
||||
import static org.dspace.discovery.SearchUtils.RESOURCE_TYPE_FIELD;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
@@ -23,7 +26,6 @@ import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.DiscoverFacetField;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
|
||||
import org.dspace.discovery.DiscoverResult;
|
||||
@@ -34,7 +36,6 @@ import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.discovery.configuration.DiscoveryConfiguration;
|
||||
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
|
||||
import org.dspace.discovery.indexobject.IndexableItem;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
@@ -181,32 +182,28 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
addLocationScopeFilter(query);
|
||||
addDefaultFilterQueries(query);
|
||||
if (distinct) {
|
||||
DiscoverFacetField dff;
|
||||
|
||||
// To get the number of distinct values we use the next "json.facet" query param
|
||||
// {"entries_count": {"type":"terms","field": "<fieldName>_filter", "limit":0, "numBuckets":true}}"
|
||||
// We use a json.facet query for metadata browsing because it allows us to limit the results
|
||||
// while obtaining the total number of facet values with numBuckets:true and sort in reverse order
|
||||
// Example of json.facet query:
|
||||
// {"<fieldName>": {"type":"terms","field": "<fieldName>_filter", "limit":0, "offset":0,
|
||||
// "sort":"index desc", "numBuckets":true, "prefix":"<startsWith>"}}
|
||||
ObjectNode jsonFacet = JsonNodeFactory.instance.objectNode();
|
||||
ObjectNode entriesCount = JsonNodeFactory.instance.objectNode();
|
||||
entriesCount.put("type", "terms");
|
||||
entriesCount.put("field", facetField + "_filter");
|
||||
entriesCount.put("limit", 0);
|
||||
entriesCount.put("numBuckets", true);
|
||||
jsonFacet.set("entries_count", entriesCount);
|
||||
|
||||
if (StringUtils.isNotBlank(startsWith)) {
|
||||
dff = new DiscoverFacetField(facetField,
|
||||
DiscoveryConfigurationParameters.TYPE_TEXT, limit,
|
||||
DiscoveryConfigurationParameters.SORT.VALUE, startsWith, offset);
|
||||
|
||||
// Add the prefix to the json facet query
|
||||
entriesCount.put("prefix", startsWith);
|
||||
ObjectNode entriesFacet = JsonNodeFactory.instance.objectNode();
|
||||
entriesFacet.put("type", "terms");
|
||||
entriesFacet.put("field", facetField + "_filter");
|
||||
entriesFacet.put("limit", limit);
|
||||
entriesFacet.put("offset", offset);
|
||||
entriesFacet.put("numBuckets", true);
|
||||
if (ascending) {
|
||||
entriesFacet.put("sort", "index");
|
||||
} else {
|
||||
dff = new DiscoverFacetField(facetField,
|
||||
DiscoveryConfigurationParameters.TYPE_TEXT, limit,
|
||||
DiscoveryConfigurationParameters.SORT.VALUE, offset);
|
||||
entriesFacet.put("sort", "index desc");
|
||||
}
|
||||
query.addFacetField(dff);
|
||||
query.setFacetMinCount(1);
|
||||
if (StringUtils.isNotBlank(startsWith)) {
|
||||
// Add the prefix to the json facet query
|
||||
entriesFacet.put("prefix", startsWith);
|
||||
}
|
||||
jsonFacet.set(facetField, entriesFacet);
|
||||
query.setMaxResults(0);
|
||||
query.addProperty("json.facet", jsonFacet.toString());
|
||||
} else {
|
||||
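The comment added in the SolrBrowseDAO hunk above describes the `json.facet` parameter now used for metadata browsing: limit, offset, reverse sort, an optional prefix, and `numBuckets:true` to get the total number of facet values. A hedged, self-contained sketch of how such a facet body can be assembled with Jackson (the class, method, and the `"entries"` facet name are illustrative only):

```java
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public final class JsonFacetExample {

    private JsonFacetExample() {
    }

    /**
     * Builds a json.facet body of the shape quoted in the comment above:
     * {"entries": {"type":"terms","field":"<field>_filter","limit":N,"offset":M,
     *  "numBuckets":true,"sort":"index desc","prefix":"<startsWith>"}}
     */
    public static String buildBrowseFacet(String facetField, int limit, int offset,
                                          boolean ascending, String startsWith) {
        ObjectNode facet = JsonNodeFactory.instance.objectNode();
        ObjectNode entries = JsonNodeFactory.instance.objectNode();
        entries.put("type", "terms");
        entries.put("field", facetField + "_filter");
        entries.put("limit", limit);
        entries.put("offset", offset);
        entries.put("numBuckets", true);            // also report the total bucket count
        entries.put("sort", ascending ? "index" : "index desc");
        if (startsWith != null && !startsWith.isEmpty()) {
            entries.put("prefix", startsWith);      // "starts with" browsing
        }
        facet.set("entries", entries);
        return facet.toString();                    // passed as the "json.facet" request property
    }
}
```
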
@@ -282,26 +279,15 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
DiscoverResult resp = getSolrResponse();
|
||||
List<FacetResult> facet = resp.getFacetResult(facetField);
|
||||
int count = doCountQuery();
|
||||
int start = 0;
|
||||
int max = facet.size();
|
||||
List<String[]> result = new ArrayList<>();
|
||||
if (ascending) {
|
||||
for (int i = start; i < (start + max) && i < count; i++) {
|
||||
FacetResult c = facet.get(i);
|
||||
String freq = showFrequencies ? String.valueOf(c.getCount())
|
||||
: "";
|
||||
result.add(new String[] {c.getDisplayedValue(),
|
||||
c.getAuthorityKey(), freq});
|
||||
}
|
||||
} else {
|
||||
for (int i = count - start - 1; i >= count - (start + max)
|
||||
&& i >= 0; i--) {
|
||||
FacetResult c = facet.get(i);
|
||||
String freq = showFrequencies ? String.valueOf(c.getCount())
|
||||
: "";
|
||||
result.add(new String[] {c.getDisplayedValue(),
|
||||
c.getAuthorityKey(), freq});
|
||||
}
|
||||
|
||||
for (int i = 0; i < max && i < count; i++) {
|
||||
FacetResult c = facet.get(i);
|
||||
String freq = showFrequencies ? String.valueOf(c.getCount())
|
||||
: "";
|
||||
result.add(new String[] {c.getDisplayedValue(),
|
||||
c.getAuthorityKey(), freq});
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -325,8 +311,10 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
public String doMaxQuery(String column, String table, int itemID)
|
||||
throws BrowseException {
|
||||
DiscoverQuery query = new DiscoverQuery();
|
||||
query.setQuery("search.resourceid:" + itemID
|
||||
+ " AND search.resourcetype:" + IndexableItem.TYPE);
|
||||
query.setQuery("*:*");
|
||||
query.addFilterQueries(
|
||||
RESOURCE_ID_FIELD + ":" + itemID,
|
||||
RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
|
||||
query.setMaxResults(1);
|
||||
DiscoverResult resp = null;
|
||||
try {
|
||||
|
@@ -131,7 +131,7 @@ public final class CheckerCommand {
collector.collect(context, info);
}

context.uncacheEntity(bitstream);
context.commit();
bitstream = dispatcher.next();
}
}

@@ -56,8 +56,8 @@ public class MostRecentChecksumDAOImpl extends AbstractHibernateDAO<MostRecentCh
criteriaQuery.where(criteriaBuilder.and(
criteriaBuilder.equal(mostRecentChecksumRoot.get(MostRecentChecksum_.toBeProcessed), false),
criteriaBuilder
.lessThanOrEqualTo(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate),
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate)
.lessThanOrEqualTo(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate),
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate)
)
);
List<Order> orderList = new LinkedList<>();

@@ -1021,7 +1021,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
if (StringUtils.isNotBlank(q)) {
StringBuilder buildQuery = new StringBuilder();
String escapedQuery = ClientUtils.escapeQueryChars(q);
buildQuery.append("(").append(escapedQuery).append(" OR ").append(escapedQuery).append("*").append(")");
buildQuery.append("(").append(escapedQuery).append(" OR dc.title_sort:*")
.append(escapedQuery).append("*").append(")");
discoverQuery.setQuery(buildQuery.toString());
}
DiscoverResult resp = searchService.search(context, discoverQuery);

@@ -187,11 +187,11 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
String authority) {
List<MetadataValue> metadata = getMetadata(dso, schema, element, qualifier, lang);
List<MetadataValue> result = new ArrayList<>(metadata);
if (!authority.equals(Item.ANY)) {
if (!Item.ANY.equals(authority)) {
Iterator<MetadataValue> iterator = result.iterator();
while (iterator.hasNext()) {
MetadataValue metadataValue = iterator.next();
if (!authority.equals(metadataValue.getAuthority())) {
if (!StringUtils.equals(authority, metadataValue.getAuthority())) {
iterator.remove();
}
}
@@ -509,7 +509,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
MetadataField metadataField = metadataValue.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
// We will attempt to disprove a match - if we can't we have a match
if (!element.equals(Item.ANY) && !element.equals(metadataField.getElement())) {
if (!Item.ANY.equals(element) && !StringUtils.equals(element, metadataField.getElement())) {
// Elements do not match, no wildcard
return false;
}
@@ -520,9 +520,9 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
// Value is qualified, so no match
return false;
}
} else if (!qualifier.equals(Item.ANY)) {
} else if (!Item.ANY.equals(qualifier)) {
// Not a wildcard, so qualifier must match exactly
if (!qualifier.equals(metadataField.getQualifier())) {
if (!StringUtils.equals(qualifier, metadataField.getQualifier())) {
return false;
}
}
@@ -533,15 +533,15 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
// Value is qualified, so no match
return false;
}
} else if (!language.equals(Item.ANY)) {
} else if (!Item.ANY.equals(language)) {
// Not a wildcard, so language must match exactly
if (!language.equals(metadataValue.getLanguage())) {
if (!StringUtils.equals(language, metadataValue.getLanguage())) {
return false;
}
}

if (!schema.equals(Item.ANY)) {
if (metadataSchema != null && !metadataSchema.getName().equals(schema)) {
if (!Item.ANY.equals(schema)) {
if (!StringUtils.equals(schema, metadataSchema.getName())) {
// The namespace doesn't match
return false;
}

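The DSpaceObjectServiceImpl hunks above swap `value.equals(...)` for constant-first `Item.ANY.equals(value)` and `StringUtils.equals(...)` so that null schema, element, qualifier, language, or authority values no longer throw `NullPointerException`. A small standalone illustration of the same null-safe pattern (the class, the `ANY` constant, and the `matches` method are hypothetical):

```java
import org.apache.commons.lang3.StringUtils;

public final class NullSafeMatchExample {

    /** Wildcard marker, analogous to org.dspace.content.Item.ANY. */
    private static final String ANY = "*";

    private NullSafeMatchExample() {
    }

    /**
     * True when the requested value is the wildcard or equals the stored value.
     * Putting the constant first and using StringUtils.equals keeps this safe
     * when either argument is null, which is exactly what the patch above does.
     */
    public static boolean matches(String requested, String stored) {
        if (ANY.equals(requested)) {                  // constant-first: no NPE when requested == null
            return true;
        }
        return StringUtils.equals(requested, stored); // null-safe comparison
    }

    public static void main(String[] args) {
        System.out.println(matches(null, "dc"));        // false, no exception
        System.out.println(matches("*", null));         // true
        System.out.println(matches("title", "title"));  // true
    }
}
```
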
@@ -30,6 +30,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -124,24 +125,33 @@ public class EntityTypeServiceImpl implements EntityTypeService {
public List<String> getSubmitAuthorizedTypes(Context context)
throws SQLException, SolrServerException, IOException {
List<String> types = new ArrayList<>();
StringBuilder query = new StringBuilder();
org.dspace.eperson.EPerson currentUser = context.getCurrentUser();
StringBuilder query = null;
EPerson currentUser = context.getCurrentUser();
if (!authorizeService.isAdmin(context)) {
String userId = "";
if (currentUser != null) {
userId = currentUser.getID().toString();
query = new StringBuilder();
query.append("submit:(e").append(userId);
}
query.append("submit:(e").append(userId);

Set<Group> groups = groupService.allMemberGroupsSet(context, currentUser);
for (Group group : groups) {
query.append(" OR g").append(group.getID());
if (query == null) {
query = new StringBuilder();
query.append("submit:(g");
} else {
query.append(" OR g");
}
query.append(group.getID());
}
query.append(")");
} else {
query.append("*:*");
}

SolrQuery sQuery = new SolrQuery(query.toString());
SolrQuery sQuery = new SolrQuery("*:*");
if (query != null) {
sQuery.addFilterQuery(query.toString());
}
sQuery.addFilterQuery("search.resourcetype:" + IndexableCollection.TYPE);
sQuery.setRows(0);
sQuery.addFacetField("search.entitytype");

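The EntityTypeServiceImpl hunk above now builds the `submit:(e<eperson> OR g<group> ...)` restriction only for non-admins and applies it as a filter query, keeping the main query as `*:*` and faceting on `search.entitytype`. A hedged sketch of the resulting SolrJ call; the helper class, its parameters, and the handling of an empty clause list are assumptions, not the exact service code:

```java
import java.util.List;
import java.util.UUID;

import org.apache.solr.client.solrj.SolrQuery;

public final class SubmitFilterQueryExample {

    private SubmitFilterQueryExample() {
    }

    /**
     * Builds a SolrQuery shaped like the one the patched getSubmitAuthorizedTypes() sends:
     * a *:* query, an optional submit:(e... OR g...) filter for non-admins,
     * a collection resource-type filter, and a facet on search.entitytype.
     */
    public static SolrQuery buildQuery(boolean isAdmin, UUID epersonId, List<UUID> groupIds,
                                       String collectionResourceType) {
        SolrQuery sQuery = new SolrQuery("*:*");
        if (!isAdmin) {
            StringBuilder clauses = new StringBuilder();
            if (epersonId != null) {
                clauses.append("e").append(epersonId);
            }
            for (UUID groupId : groupIds) {
                if (clauses.length() > 0) {
                    clauses.append(" OR ");
                }
                clauses.append("g").append(groupId);
            }
            if (clauses.length() > 0) {
                // e.g. submit:(e<uuid> OR g<uuid> OR g<uuid>)
                sQuery.addFilterQuery("submit:(" + clauses + ")");
            }
        }
        sQuery.addFilterQuery("search.resourcetype:" + collectionResourceType);
        sQuery.setRows(0);                          // only facet counts are needed
        sQuery.addFacetField("search.entitytype");
        return sQuery;
    }
}
```
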
@@ -23,6 +23,7 @@ import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.scripts.handler.DSpaceRunnableHandler;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -36,6 +37,11 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
|
||||
@Autowired
|
||||
private DSpaceObjectUtils dSpaceObjectUtils;
|
||||
|
||||
@Autowired
|
||||
private ConfigurationService configurationService;
|
||||
|
||||
private int csxExportLimit = -1;
|
||||
|
||||
@Override
|
||||
public DSpaceCSV handleExport(Context context, boolean exportAllItems, boolean exportAllMetadata, String identifier,
|
||||
DSpaceRunnableHandler handler) throws Exception {
|
||||
@@ -43,7 +49,7 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
|
||||
|
||||
if (exportAllItems) {
|
||||
handler.logInfo("Exporting whole repository WARNING: May take some time!");
|
||||
toExport = itemService.findAll(context);
|
||||
toExport = itemService.findAll(context, getCsvExportLimit(), 0);
|
||||
} else {
|
||||
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService()
|
||||
.resolveToObject(context, identifier);
|
||||
@@ -63,7 +69,7 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
|
||||
} else if (dso.getType() == Constants.COLLECTION) {
|
||||
handler.logInfo("Exporting collection '" + dso.getName() + "' (" + identifier + ")");
|
||||
Collection collection = (Collection) dso;
|
||||
toExport = itemService.findByCollection(context, collection);
|
||||
toExport = itemService.findByCollection(context, collection, getCsvExportLimit(), 0);
|
||||
} else if (dso.getType() == Constants.COMMUNITY) {
|
||||
handler.logInfo("Exporting community '" + dso.getName() + "' (" + identifier + ")");
|
||||
toExport = buildFromCommunity(context, (Community) dso);
|
||||
@@ -74,18 +80,21 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
|
||||
}
|
||||
}
|
||||
|
||||
DSpaceCSV csv = this.export(context, toExport, exportAllMetadata);
|
||||
DSpaceCSV csv = this.export(context, toExport, exportAllMetadata, handler);
|
||||
return csv;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DSpaceCSV export(Context context, Iterator<Item> toExport, boolean exportAll) throws Exception {
|
||||
public DSpaceCSV export(Context context, Iterator<Item> toExport,
|
||||
boolean exportAll, DSpaceRunnableHandler handler) throws Exception {
|
||||
Context.Mode originalMode = context.getCurrentMode();
|
||||
context.setMode(Context.Mode.READ_ONLY);
|
||||
|
||||
// Process each item
|
||||
// Process each item until we reach the limit
|
||||
int itemExportLimit = getCsvExportLimit();
|
||||
DSpaceCSV csv = new DSpaceCSV(exportAll);
|
||||
while (toExport.hasNext()) {
|
||||
|
||||
for (int itemsAdded = 0; toExport.hasNext() && itemsAdded < itemExportLimit; itemsAdded++) {
|
||||
Item item = toExport.next();
|
||||
csv.addItem(item);
|
||||
context.uncacheEntity(item);
|
||||
@@ -97,8 +106,9 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
|
||||
}
|
||||
|
||||
@Override
|
||||
public DSpaceCSV export(Context context, Community community, boolean exportAll) throws Exception {
|
||||
return export(context, buildFromCommunity(context, community), exportAll);
|
||||
public DSpaceCSV export(Context context, Community community,
|
||||
boolean exportAll, DSpaceRunnableHandler handler) throws Exception {
|
||||
return export(context, buildFromCommunity(context, community), exportAll, handler);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -117,21 +127,30 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
|
||||
// Add all the collections
|
||||
List<Collection> collections = community.getCollections();
|
||||
for (Collection collection : collections) {
|
||||
Iterator<Item> items = itemService.findByCollection(context, collection);
|
||||
while (items.hasNext()) {
|
||||
// Never obtain more items than the configured limit
|
||||
Iterator<Item> items = itemService.findByCollection(context, collection, getCsvExportLimit(), 0);
|
||||
while (result.size() < getCsvExportLimit() && items.hasNext()) {
|
||||
result.add(items.next());
|
||||
}
|
||||
}
|
||||
|
||||
// Add all the sub-communities
|
||||
// Add all the sub-communities
|
||||
List<Community> communities = community.getSubcommunities();
|
||||
for (Community subCommunity : communities) {
|
||||
Iterator<Item> items = buildFromCommunity(context, subCommunity);
|
||||
while (items.hasNext()) {
|
||||
while (result.size() < getCsvExportLimit() && items.hasNext()) {
|
||||
result.add(items.next());
|
||||
}
|
||||
}
|
||||
|
||||
return result.iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getCsvExportLimit() {
|
||||
if (csxExportLimit == -1) {
|
||||
csxExportLimit = configurationService.getIntProperty("bulkedit.export.max.items", 500);
|
||||
}
|
||||
return csxExportLimit;
|
||||
}
|
||||
}
|
||||
|
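The MetadataDSpaceCsvExportServiceImpl hunks above cap metadata exports with a lazily cached `bulkedit.export.max.items` property (defaulting to 500) and stop iterating once that many items have been added. A rough sketch of the same capped-iteration pattern; the class and method names here are illustrative only:

```java
import java.util.Iterator;

import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

public class ExportLimitExample {

    private int exportLimit = -1; // lazily resolved, mirroring csxExportLimit above

    /** Reads bulkedit.export.max.items once, defaulting to 500 as in the patch. */
    private int getExportLimit() {
        if (exportLimit == -1) {
            ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
            exportLimit = config.getIntProperty("bulkedit.export.max.items", 500);
        }
        return exportLimit;
    }

    /** Consumes at most getExportLimit() elements from the iterator. */
    public <T> int processCapped(Iterator<T> items) {
        int added = 0;
        while (items.hasNext() && added < getExportLimit()) {
            items.next();   // in the real service each item is added to the DSpaceCSV
            added++;
        }
        return added;
    }
}
```
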
@@ -577,7 +577,7 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
.collect(Collectors.toList()));
}
DiscoverySearchFilterFacet matchingFacet = null;
for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) {
for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllUniqueFacetsConfig()) {
boolean coversAllFieldsFromVocab = true;
for (String fieldFromVocab: metadataFields) {
boolean coversFieldFromVocab = false;

@@ -34,35 +34,37 @@ import org.xml.sax.InputSource;
|
||||
* from {@code ${dspace.dir}/config/controlled-vocabularies/*.xml} and turns
|
||||
* them into autocompleting authorities.
|
||||
*
|
||||
* Configuration: This MUST be configured as a self-named plugin, e.g.: {@code
|
||||
* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
|
||||
* <p>Configuration: This MUST be configured as a self-named plugin, e.g.: {@code
|
||||
* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority =
|
||||
* org.dspace.content.authority.DSpaceControlledVocabulary
|
||||
* }
|
||||
*
|
||||
* It AUTOMATICALLY configures a plugin instance for each XML file in the
|
||||
* <p>It AUTOMATICALLY configures a plugin instance for each XML file in the
|
||||
* controlled vocabularies directory. The name of the plugin is the basename of
|
||||
* the file; e.g., {@code ${dspace.dir}/config/controlled-vocabularies/nsi.xml}
|
||||
* would generate a plugin called "nsi".
|
||||
*
|
||||
* Each configured plugin comes with three configuration options: {@code
|
||||
* vocabulary.plugin._plugin_.hierarchy.store = <true|false>
|
||||
* # Store entire hierarchy along with selected value. Default: TRUE
|
||||
* vocabulary.plugin._plugin_.hierarchy.suggest =
|
||||
* <true|false> # Display entire hierarchy in the suggestion list. Default: TRUE
|
||||
* vocabulary.plugin._plugin_.delimiter = "<string>"
|
||||
* # Delimiter to use when building hierarchy strings. Default: "::"
|
||||
* }
|
||||
* <p>Each configured plugin comes with three configuration options:
|
||||
* <ul>
|
||||
* <li>{@code vocabulary.plugin._plugin_.hierarchy.store = <true|false>}
|
||||
* # Store entire hierarchy along with selected value. Default: TRUE</li>
|
||||
* <li>{@code vocabulary.plugin._plugin_.hierarchy.suggest =
|
||||
* <true|false> # Display entire hierarchy in the suggestion list. Default: TRUE}</li>
|
||||
* <li>{@code vocabulary.plugin._plugin_.delimiter = "<string>"
|
||||
* # Delimiter to use when building hierarchy strings. Default: "::"}</li>
|
||||
* </ul>
|
||||
*
|
||||
* @author Michael B. Klein
|
||||
*/
|
||||
|
||||
public class DSpaceControlledVocabulary extends SelfNamedPlugin implements HierarchicalAuthority {
|
||||
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceControlledVocabulary.class);
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
|
||||
protected static String xpathTemplate = "//node[contains(translate(@label,'ABCDEFGHIJKLMNOPQRSTUVWXYZ'," +
|
||||
"'abcdefghijklmnopqrstuvwxyz'),'%s')]";
|
||||
protected static String idTemplate = "//node[@id = '%s']";
|
||||
protected static String labelTemplate = "//node[@label = '%s']";
|
||||
"'abcdefghijklmnopqrstuvwxyz'),%s)]";
|
||||
protected static String idTemplate = "//node[@id = %s]";
|
||||
protected static String idTemplateQuoted = "//node[@id = '%s']";
|
||||
protected static String labelTemplate = "//node[@label = %s]";
|
||||
protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node";
|
||||
protected static String rootTemplate = "/node";
|
||||
protected static String pluginNames[] = null;
|
||||
@@ -106,7 +108,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
File.separator + "config" +
|
||||
File.separator + "controlled-vocabularies";
|
||||
String[] xmlFiles = (new File(vocabulariesPath)).list(new xmlFilter());
|
||||
List<String> names = new ArrayList<String>();
|
||||
List<String> names = new ArrayList<>();
|
||||
for (String filename : xmlFiles) {
|
||||
names.add((new File(filename)).getName().replace(".xml", ""));
|
||||
}
|
||||
@@ -162,14 +164,23 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
public Choices getMatches(String text, int start, int limit, String locale) {
|
||||
init();
|
||||
log.debug("Getting matches for '" + text + "'");
|
||||
String xpathExpression = "";
|
||||
String[] textHierarchy = text.split(hierarchyDelimiter, -1);
|
||||
StringBuilder xpathExpressionBuilder = new StringBuilder();
|
||||
for (int i = 0; i < textHierarchy.length; i++) {
|
||||
xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "'").toLowerCase());
|
||||
xpathExpressionBuilder.append(String.format(xpathTemplate, "$var" + i));
|
||||
}
|
||||
String xpathExpression = xpathExpressionBuilder.toString();
|
||||
XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
int total = 0;
|
||||
List<Choice> choices = new ArrayList<Choice>();
|
||||
xpath.setXPathVariableResolver(variableName -> {
|
||||
String varName = variableName.getLocalPart();
|
||||
if (varName.startsWith("var")) {
|
||||
int index = Integer.parseInt(varName.substring(3));
|
||||
return textHierarchy[index].toLowerCase();
|
||||
}
|
||||
throw new IllegalArgumentException("Unexpected variable: " + varName);
|
||||
});
|
||||
int total;
|
||||
List<Choice> choices;
|
||||
try {
|
||||
NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
|
||||
total = results.getLength();
|
||||
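The getMatches() hunk above stops splicing user text into the XPath string and instead references `$var0`, `$var1`, ... which are supplied through `XPath.setXPathVariableResolver`, avoiding the quoting problems of the old `String.format` approach. A minimal standalone sketch of that technique; the document, variable name, and expression are made up for illustration:

```java
import javax.xml.namespace.QName;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.w3c.dom.Document;
import org.w3c.dom.NodeList;

public final class XPathVariableExample {

    private XPathVariableExample() {
    }

    /**
     * Finds nodes whose label equals the user-supplied value. The value is bound to
     * the $label variable instead of being concatenated into the expression, so
     * quotes or apostrophes in the input cannot break (or inject into) the XPath.
     */
    public static NodeList findByLabel(Document vocabulary, String userValue)
            throws XPathExpressionException {
        XPath xpath = XPathFactory.newInstance().newXPath();
        xpath.setXPathVariableResolver((QName variableName) -> {
            if ("label".equals(variableName.getLocalPart())) {
                return userValue;
            }
            throw new IllegalArgumentException("Unexpected variable: " + variableName);
        });
        return (NodeList) xpath.evaluate("//node[@label = $label]", vocabulary, XPathConstants.NODESET);
    }
}
```
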
@@ -185,14 +196,23 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
@Override
|
||||
public Choices getBestMatch(String text, String locale) {
|
||||
init();
|
||||
log.debug("Getting best matches for '" + text + "'");
|
||||
String xpathExpression = "";
|
||||
log.debug("Getting best matches for {}'", text);
|
||||
String[] textHierarchy = text.split(hierarchyDelimiter, -1);
|
||||
StringBuilder xpathExpressionBuilder = new StringBuilder();
|
||||
for (int i = 0; i < textHierarchy.length; i++) {
|
||||
xpathExpression += String.format(labelTemplate, textHierarchy[i].replaceAll("'", "'"));
|
||||
xpathExpressionBuilder.append(String.format(labelTemplate, "$var" + i));
|
||||
}
|
||||
String xpathExpression = xpathExpressionBuilder.toString();
|
||||
XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
List<Choice> choices = new ArrayList<Choice>();
|
||||
xpath.setXPathVariableResolver(variableName -> {
|
||||
String varName = variableName.getLocalPart();
|
||||
if (varName.startsWith("var")) {
|
||||
int index = Integer.parseInt(varName.substring(3));
|
||||
return textHierarchy[index];
|
||||
}
|
||||
throw new IllegalArgumentException("Unexpected variable: " + varName);
|
||||
});
|
||||
List<Choice> choices;
|
||||
try {
|
||||
NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
|
||||
choices = getChoicesFromNodeList(results, 0, 1);
|
||||
@@ -240,7 +260,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
@Override
|
||||
public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) {
|
||||
init();
|
||||
String xpathExpression = String.format(idTemplate, parentId);
|
||||
String xpathExpression = String.format(idTemplateQuoted, parentId);
|
||||
return getChoicesByXpath(xpathExpression, start, limit);
|
||||
}
|
||||
|
||||
@@ -264,15 +284,12 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
}
|
||||
|
||||
private boolean isRootElement(Node node) {
|
||||
if (node != null && node.getOwnerDocument().getDocumentElement().equals(node)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
return node != null && node.getOwnerDocument().getDocumentElement().equals(node);
|
||||
}
|
||||
|
||||
private Node getNode(String key) throws XPathExpressionException {
|
||||
init();
|
||||
String xpathExpression = String.format(idTemplate, key);
|
||||
String xpathExpression = String.format(idTemplateQuoted, key);
|
||||
Node node = getNodeFromXPath(xpathExpression);
|
||||
return node;
|
||||
}
|
||||
@@ -284,7 +301,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
}
|
||||
|
||||
private List<Choice> getChoicesFromNodeList(NodeList results, int start, int limit) {
|
||||
List<Choice> choices = new ArrayList<Choice>();
|
||||
List<Choice> choices = new ArrayList<>();
|
||||
for (int i = 0; i < results.getLength(); i++) {
|
||||
if (i < start) {
|
||||
continue;
|
||||
@@ -303,14 +320,14 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
|
||||
private Map<String, String> addOtherInformation(String parentCurr, String noteCurr,
|
||||
List<String> childrenCurr, String authorityCurr) {
|
||||
Map<String, String> extras = new HashMap<String, String>();
|
||||
Map<String, String> extras = new HashMap<>();
|
||||
if (StringUtils.isNotBlank(parentCurr)) {
|
||||
extras.put("parent", parentCurr);
|
||||
}
|
||||
if (StringUtils.isNotBlank(noteCurr)) {
|
||||
extras.put("note", noteCurr);
|
||||
}
|
||||
if (childrenCurr.size() > 0) {
|
||||
if (!childrenCurr.isEmpty()) {
|
||||
extras.put("hasChildren", "true");
|
||||
} else {
|
||||
extras.put("hasChildren", "false");
|
||||
@@ -368,7 +385,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
}
|
||||
|
||||
private List<String> getChildren(Node node) {
|
||||
List<String> children = new ArrayList<String>();
|
||||
List<String> children = new ArrayList<>();
|
||||
NodeList childNodes = node.getChildNodes();
|
||||
for (int ci = 0; ci < childNodes.getLength(); ci++) {
|
||||
Node firstChild = childNodes.item(ci);
|
||||
@@ -391,7 +408,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
private boolean isSelectable(Node node) {
|
||||
Node selectableAttr = node.getAttributes().getNamedItem("selectable");
|
||||
if (null != selectableAttr) {
|
||||
return Boolean.valueOf(selectableAttr.getNodeValue());
|
||||
return Boolean.parseBoolean(selectableAttr.getNodeValue());
|
||||
} else { // Default is true
|
||||
return true;
|
||||
}
|
||||
@@ -418,7 +435,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
}
|
||||
|
||||
private Choices getChoicesByXpath(String xpathExpression, int start, int limit) {
|
||||
List<Choice> choices = new ArrayList<Choice>();
|
||||
List<Choice> choices = new ArrayList<>();
|
||||
XPath xpath = XPathFactory.newInstance().newXPath();
|
||||
try {
|
||||
Node parentNode = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE);
|
||||
|
@@ -14,6 +14,7 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.packager.PackageDisseminator;
|
||||
@@ -129,7 +130,7 @@ public class METSDisseminationCrosswalk
|
||||
|
||||
try {
|
||||
//Return just the root Element of the METS file
|
||||
SAXBuilder builder = new SAXBuilder();
|
||||
SAXBuilder builder = XMLUtils.getSAXBuilder();
|
||||
Document metsDocument = builder.build(tempFile);
|
||||
return metsDocument.getRootElement();
|
||||
} catch (JDOMException je) {
|
||||
|
@@ -22,6 +22,7 @@ import java.util.Properties;
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
@@ -144,7 +145,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
|
||||
MODS_NS.getURI() + " " + MODS_XSD;
|
||||
|
||||
private static final XMLOutputter outputUgly = new XMLOutputter();
|
||||
private static final SAXBuilder builder = new SAXBuilder();
|
||||
private static final SAXBuilder builder = XMLUtils.getSAXBuilder();
|
||||
|
||||
private Map<String, modsTriple> modsMap = null;
|
||||
|
||||
|
@@ -20,9 +20,7 @@ import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.BitstreamFormatService;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
@@ -224,29 +222,17 @@ public class PREMISCrosswalk
|
||||
// c. made-up name based on sequence ID and extension.
|
||||
String sid = String.valueOf(bitstream.getSequenceID());
|
||||
String baseUrl = configurationService.getProperty("dspace.ui.url");
|
||||
String handle = null;
|
||||
// get handle of parent Item of this bitstream, if there is one:
|
||||
List<Bundle> bn = bitstream.getBundles();
|
||||
if (bn.size() > 0) {
|
||||
List<Item> bi = bn.get(0).getItems();
|
||||
if (bi.size() > 0) {
|
||||
handle = bi.get(0).getHandle();
|
||||
}
|
||||
}
|
||||
// get or make up name for bitstream:
|
||||
String bsName = bitstream.getName();
|
||||
if (bsName == null) {
|
||||
List<String> ext = bitstream.getFormat(context).getExtensions();
|
||||
bsName = "bitstream_" + sid + (ext.size() > 0 ? ext.get(0) : "");
|
||||
}
|
||||
if (handle != null && baseUrl != null) {
|
||||
if (baseUrl != null) {
|
||||
oiv.setText(baseUrl
|
||||
+ "/bitstream/"
|
||||
+ URLEncoder.encode(handle, "UTF-8")
|
||||
+ "/"
|
||||
+ sid
|
||||
+ "/"
|
||||
+ URLEncoder.encode(bsName, "UTF-8"));
|
||||
+ "/bitstreams/"
|
||||
+ bitstream.getID()
|
||||
+ "/download");
|
||||
} else {
|
||||
oiv.setText(URLEncoder.encode(bsName, "UTF-8"));
|
||||
}
|
||||
|
@@ -22,6 +22,7 @@ import java.util.Properties;
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
@@ -125,7 +126,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
|
||||
// XML schemaLocation fragment for this crosswalk, from config.
|
||||
private String schemaLocation = null;
|
||||
|
||||
private static final SAXBuilder builder = new SAXBuilder();
|
||||
private static final SAXBuilder builder = XMLUtils.getSAXBuilder();
|
||||
|
||||
protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
|
||||
|
@@ -13,6 +13,7 @@ import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.packager.PackageDisseminator;
|
||||
@@ -208,7 +209,7 @@ public class RoleCrosswalk
|
||||
|
||||
try {
|
||||
//Try to parse our XML results (which were disseminated by the Packager)
|
||||
SAXBuilder builder = new SAXBuilder();
|
||||
SAXBuilder builder = XMLUtils.getSAXBuilder();
|
||||
Document xmlDocument = builder.build(tempFile);
|
||||
//If XML parsed successfully, return root element of doc
|
||||
if (xmlDocument != null && xmlDocument.hasRootElement()) {
|
||||
|
@@ -18,6 +18,7 @@ import javax.xml.transform.TransformerException;
|
||||
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
@@ -297,7 +298,7 @@ public class XSLTIngestionCrosswalk
|
||||
"Failed to initialize transformer, probably error loading stylesheet.");
|
||||
}
|
||||
|
||||
SAXBuilder builder = new SAXBuilder();
|
||||
SAXBuilder builder = XMLUtils.getSAXBuilder();
|
||||
Document inDoc = builder.build(new FileInputStream(argv[i + 1]));
|
||||
XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
|
||||
List dimList;
|
||||
|
@@ -152,7 +152,7 @@ public class BitstreamDAOImpl extends AbstractHibernateDSODAO<Bitstream> impleme
@Override
public int countWithNoPolicy(Context context) throws SQLException {
Query query = createQuery(context,
"SELECT count(bit.id) from Bitstream bit where bit.deleted<>true and bit.id not in" +
"SELECT count(bit.id) from Bitstream bit where bit.deleted<>true and bit not in" +
" (select res.dSpaceObject from ResourcePolicy res where res.resourceTypeId = " +
":typeId )");
query.setParameter("typeId", Constants.BITSTREAM);

@@ -12,6 +12,7 @@ import java.util.AbstractMap;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
@@ -19,6 +20,7 @@ import javax.persistence.criteria.Join;
|
||||
import javax.persistence.criteria.Predicate;
|
||||
import javax.persistence.criteria.Root;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.authorize.ResourcePolicy_;
|
||||
import org.dspace.content.Collection;
|
||||
@@ -40,6 +42,11 @@ import org.dspace.eperson.Group;
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> implements CollectionDAO {
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CollectionDAOImpl.class);
|
||||
|
||||
protected CollectionDAOImpl() {
|
||||
super();
|
||||
}
|
||||
@@ -159,7 +166,7 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
|
||||
|
||||
@Override
|
||||
public List<Collection> findCollectionsWithSubscribers(Context context) throws SQLException {
|
||||
return list(createQuery(context, "SELECT DISTINCT c FROM Collection c JOIN Subscription s ON c.id = " +
|
||||
return list(createQuery(context, "SELECT DISTINCT c FROM Collection c JOIN Subscription s ON c = " +
|
||||
"s.dSpaceObject"));
|
||||
}
|
||||
|
||||
@@ -172,14 +179,25 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<Map.Entry<Collection, Long>> getCollectionsWithBitstreamSizesTotal(Context context)
|
||||
throws SQLException {
|
||||
String q = "select col as collection, sum(bit.sizeBytes) as totalBytes from Item i join i.collections col " +
|
||||
"join i.bundles bun join bun.bitstreams bit group by col";
|
||||
String q = "select col.id, sum(bit.sizeBytes) as totalBytes from Item i join i.collections col " +
|
||||
"join i.bundles bun join bun.bitstreams bit group by col.id";
|
||||
Query query = createQuery(context, q);
|
||||
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
|
||||
List<Object[]> list = query.getResultList();
|
||||
List<Map.Entry<Collection, Long>> returnList = new ArrayList<>(list.size());
|
||||
for (Object[] o : list) {
|
||||
returnList.add(new AbstractMap.SimpleEntry<>((Collection) o[0], (Long) o[1]));
|
||||
CriteriaQuery<Collection> criteriaQuery = criteriaBuilder.createQuery(Collection.class);
|
||||
Root<Collection> collectionRoot = criteriaQuery.from(Collection.class);
|
||||
criteriaQuery.select(collectionRoot).where(criteriaBuilder.equal(collectionRoot.get("id"), (UUID) o[0]));
|
||||
Query collectionQuery = createQuery(context, criteriaQuery);
|
||||
Collection collection = (Collection) collectionQuery.getSingleResult();
|
||||
if (collection != null) {
|
||||
returnList.add(new AbstractMap.SimpleEntry<>(collection, (Long) o[1]));
|
||||
} else {
|
||||
log.warn("Unable to find Collection with UUID: {}", o[0]);
|
||||
}
|
||||
}
|
||||
return returnList;
|
||||
}
|
||||
|
@@ -11,8 +11,6 @@ import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
@@ -21,7 +19,11 @@ import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
@@ -496,8 +498,11 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
|
||||
// Finish creating the item. This actually assigns the handle,
|
||||
// and will either install item immediately or start a workflow, based on params
|
||||
PackageUtils.finishCreateItem(context, wsi, handle, params);
|
||||
} else {
|
||||
// We should have a workspace item during ingest, so this code is only here for safety.
|
||||
// Update the object to make sure all changes are committed
|
||||
PackageUtils.updateDSpaceObject(context, dso);
|
||||
}
|
||||
|
||||
} else if (type == Constants.COLLECTION || type == Constants.COMMUNITY) {
|
||||
// Add logo if one is referenced from manifest
|
||||
addContainerLogo(context, dso, manifest, pkgFile, params);
|
||||
@@ -511,6 +516,9 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
|
||||
// (this allows subclasses to do some final validation / changes as
|
||||
// necessary)
|
||||
finishObject(context, dso, params);
|
||||
|
||||
// Update the object to make sure all changes are committed
|
||||
PackageUtils.updateDSpaceObject(context, dso);
|
||||
} else if (type == Constants.SITE) {
|
||||
// Do nothing by default -- Crosswalks will handle anything necessary to ingest at Site-level
|
||||
|
||||
@@ -518,18 +526,15 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
|
||||
// (this allows subclasses to do some final validation / changes as
|
||||
// necessary)
|
||||
finishObject(context, dso, params);
|
||||
|
||||
// Update the object to make sure all changes are committed
|
||||
PackageUtils.updateDSpaceObject(context, dso);
|
||||
} else {
|
||||
throw new PackageValidationException(
|
||||
"Unknown DSpace Object type in package, type="
|
||||
+ String.valueOf(type));
|
||||
}
|
||||
|
||||
// -- Step 6 --
|
||||
// Finish things up!
|
||||
|
||||
// Update the object to make sure all changes are committed
|
||||
PackageUtils.updateDSpaceObject(context, dso);
|
||||
|
||||
return dso;
|
||||
}
|
||||
|
||||
@@ -1310,13 +1315,12 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
|
||||
if (params.getBooleanProperty("manifestOnly", false)) {
|
||||
// NOTE: since we are only dealing with a METS manifest,
|
||||
// we will assume all external files are available via URLs.
|
||||
try {
|
||||
try (CloseableHttpClient httpClient = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
// attempt to open a connection to given URL
|
||||
URL fileURL = new URL(path);
|
||||
URLConnection connection = fileURL.openConnection();
|
||||
|
||||
// open stream to access file contents
|
||||
return connection.getInputStream();
|
||||
try (CloseableHttpResponse httpResponse = httpClient.execute(new HttpGet(path))) {
|
||||
// open stream to access file contents
|
||||
return httpResponse.getEntity().getContent();
|
||||
}
|
||||
} catch (IOException io) {
|
||||
log
|
||||
.error("Unable to retrieve external file from URL '"
|
||||
|
@@ -20,6 +20,7 @@ import java.util.List;
|
||||
import org.apache.commons.codec.binary.Base64;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Bundle;
|
||||
@@ -265,12 +266,13 @@ public class METSManifest {
|
||||
public static METSManifest create(InputStream is, boolean validate, String configName)
|
||||
throws IOException,
|
||||
MetadataValidationException {
|
||||
SAXBuilder builder = new SAXBuilder(validate);
|
||||
|
||||
SAXBuilder builder = XMLUtils.getSAXBuilder();
|
||||
builder.setIgnoringElementContentWhitespace(true);
|
||||
|
||||
// Set validation feature
|
||||
if (validate) {
|
||||
builder.setValidation(true);
|
||||
builder.setFeature("http://apache.org/xml/features/validation/schema", true);
|
||||
|
||||
// Tell the parser where local copies of schemas are, to speed up
|
||||
@@ -278,10 +280,6 @@ public class METSManifest {
|
||||
if (localSchemas.length() > 0) {
|
||||
builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas);
|
||||
}
|
||||
} else {
|
||||
// disallow DTD parsing to ensure no XXE attacks can occur.
|
||||
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
|
||||
builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
}
|
||||
|
||||
// Parse the METS file
|
||||
|
@@ -19,6 +19,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
|
||||
import org.apache.commons.codec.DecoderException;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
@@ -386,7 +387,7 @@ public class RoleIngester implements PackageIngester {
|
||||
Document document;
|
||||
|
||||
try {
|
||||
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||
DocumentBuilderFactory dbf = XMLUtils.getDocumentBuilderFactory();
|
||||
dbf.setIgnoringComments(true);
|
||||
dbf.setCoalescing(true);
|
||||
DocumentBuilder db = dbf.newDocumentBuilder();
|
||||
@@ -420,7 +421,7 @@ public class RoleIngester implements PackageIngester {
|
||||
Document document;
|
||||
|
||||
try {
|
||||
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||
DocumentBuilderFactory dbf = XMLUtils.getDocumentBuilderFactory();
|
||||
dbf.setIgnoringComments(true);
|
||||
dbf.setCoalescing(true);
|
||||
DocumentBuilder db = dbf.newDocumentBuilder();
|
||||
|
@@ -202,7 +202,7 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
* Get the value(s) of a metadata field.
* @param dSpaceObject the object whose metadata are sought.
* @param mdString the name of the field: {@code schema.element.qualifier}.
* @param authority name of the authority which controls these values, or null.
* @param authority name of the authority which controls these values, or Item.ANY, or null.
* @return all matching metadata values, or null if none.
*/
public List<MetadataValue> getMetadata(T dSpaceObject, String mdString, String authority);
@@ -216,7 +216,7 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
* @param lang the language of the requested field value(s),
* null if explicitly no language,
* or {@link org.dspace.content.Item.ANY} to match all languages.
* @param authority name of the authority which controls these values, or null.
* @param authority name of the authority which controls these values, or Item.ANY, or null.
* @return value(s) of the indicated field for the given DSO, or null.
*/
public List<MetadataValue> getMetadata(T dSpaceObject, String schema,

@@ -44,7 +44,8 @@ public interface MetadataDSpaceCsvExportService {
* @return A DSpaceCSV object containing the exported information
* @throws Exception If something goes wrong
*/
public DSpaceCSV export(Context context, Iterator<Item> toExport, boolean exportAll) throws Exception;
public DSpaceCSV export(Context context, Iterator<Item> toExport,
boolean exportAll, DSpaceRunnableHandler handler) throws Exception;

/**
* This method will export all the Items within the given Community to a DSpaceCSV
@@ -54,6 +55,9 @@ public interface MetadataDSpaceCsvExportService {
* @return A DSpaceCSV object containing the exported information
* @throws Exception If something goes wrong
*/
public DSpaceCSV export(Context context, Community community, boolean exportAll) throws Exception;
public DSpaceCSV export(Context context, Community community,
boolean exportAll, DSpaceRunnableHandler handler) throws Exception;

}
int getCsvExportLimit();

}

@@ -458,7 +458,20 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
for (Map.Entry<String, Object> entry : equals.entrySet()) {
criteria.where(criteriaBuilder.equal(root.get(entry.getKey()), entry.getValue()));
}

criteria.orderBy(criteriaBuilder.asc(root.get("id")));

return executeCriteriaQuery(context, criteria, cacheable, maxResults, offset);
}

/**
* Create a Query object from a CriteriaQuery
* @param context current Context
* @param criteriaQuery CriteriaQuery built via CriteriaBuilder
* @return corresponding Query
* @throws SQLException if error occurs
*/
public Query createQuery(Context context, CriteriaQuery criteriaQuery) throws SQLException {
return this.getHibernateSession(context).createQuery(criteriaQuery);
}
}

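The AbstractHibernateDAO hunk above adds a deterministic `ORDER BY id` to the generic finder and a `createQuery(Context, CriteriaQuery)` overload that other parts of this change set (for example `CollectionDAOImpl`) use. A brief hedged sketch of how a subclass might call the new overload; the subclass, method, and field names are illustrative, not code from the patch:

```java
import java.sql.SQLException;
import java.util.UUID;
import javax.persistence.Query;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

import org.dspace.content.Collection;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;

// Hypothetical subclass method showing the new createQuery(Context, CriteriaQuery) overload.
public class CollectionLookupExample extends AbstractHibernateDAO<Collection> {

    public Collection findById(Context context, UUID id) throws SQLException {
        CriteriaBuilder cb = getCriteriaBuilder(context);
        CriteriaQuery<Collection> cq = cb.createQuery(Collection.class);
        Root<Collection> root = cq.from(Collection.class);
        cq.select(root).where(cb.equal(root.get("id"), id));

        Query query = createQuery(context, cq);        // the overload added above
        return (Collection) query.getSingleResult();   // same pattern as CollectionDAOImpl
    }
}
```
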
@@ -506,4 +506,5 @@ public final class Utils {
ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
return StringSubstitutor.replace(string, config.getProperties());
}

}

@@ -9,11 +9,15 @@ package org.dspace.ctask.general;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataValue;
|
||||
@@ -135,24 +139,20 @@ public class BasicLinkChecker extends AbstractCurationTask {
|
||||
* @return The HTTP response code (e.g. 200 / 301 / 404 / 500)
|
||||
*/
|
||||
protected int getResponseStatus(String url, int redirects) {
|
||||
try {
|
||||
URL theURL = new URL(url);
|
||||
HttpURLConnection connection = (HttpURLConnection) theURL.openConnection();
|
||||
connection.setInstanceFollowRedirects(true);
|
||||
int statusCode = connection.getResponseCode();
|
||||
RequestConfig config = RequestConfig.custom().setRedirectsEnabled(true).build();
|
||||
try (CloseableHttpClient httpClient = DSpaceHttpClientFactory.getInstance().buildWithRequestConfig(config)) {
|
||||
CloseableHttpResponse httpResponse = httpClient.execute(new HttpGet(url));
|
||||
int statusCode = httpResponse.getStatusLine().getStatusCode();
|
||||
int maxRedirect = configurationService.getIntProperty("curate.checklinks.max-redirect", 0);
|
||||
if ((statusCode == HttpURLConnection.HTTP_MOVED_TEMP || statusCode == HttpURLConnection.HTTP_MOVED_PERM ||
|
||||
statusCode == HttpURLConnection.HTTP_SEE_OTHER)) {
|
||||
connection.disconnect();
|
||||
String newUrl = connection.getHeaderField("Location");
|
||||
String newUrl = httpResponse.getFirstHeader("Location").getValue();
|
||||
if (newUrl != null && (maxRedirect >= redirects || maxRedirect == -1)) {
|
||||
redirects++;
|
||||
return getResponseStatus(newUrl, redirects);
|
||||
}
|
||||
|
||||
}
|
||||
return statusCode;
|
||||
|
||||
} catch (IOException ioe) {
|
||||
// Must be a bad URL
|
||||
log.debug("Bad link: " + ioe.getMessage());
|
||||
|
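The BasicLinkChecker hunk above drops `HttpURLConnection` in favour of a client obtained from `DSpaceHttpClientFactory` with a `RequestConfig`, then reads the status line and the `Location` header to follow redirects up to a configured limit. A hedged sketch of that status-check loop using plain Apache HttpClient; the factory call is replaced by `HttpClientBuilder` here and redirects are disabled so the loop controls them, so this is not the exact DSpace wiring:

```java
import java.io.IOException;

import org.apache.http.Header;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;

public final class LinkStatusExample {

    private LinkStatusExample() {
    }

    /** Returns the HTTP status of a URL, manually following up to maxRedirects redirects. */
    public static int getResponseStatus(String url, int maxRedirects) throws IOException {
        RequestConfig config = RequestConfig.custom()
                .setRedirectsEnabled(false) // follow redirects ourselves, as the curation task does
                .build();
        try (CloseableHttpClient client = HttpClientBuilder.create()
                .setDefaultRequestConfig(config).build();
             CloseableHttpResponse response = client.execute(new HttpGet(url))) {
            int status = response.getStatusLine().getStatusCode();
            if (status >= 300 && status < 400 && maxRedirects > 0) {
                Header location = response.getFirstHeader("Location");
                if (location != null) {
                    return getResponseStatus(location.getValue(), maxRedirects - 1);
                }
            }
            return status;
        }
    }
}
```
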
@@ -30,13 +30,14 @@ import javax.xml.xpath.XPathExpressionException;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
@@ -176,7 +177,7 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
|
||||
fieldSeparator = (fldSep != null) ? fldSep : " ";
|
||||
urlTemplate = taskProperty("template");
|
||||
templateParam = urlTemplate.substring(urlTemplate.indexOf("{") + 1,
|
||||
urlTemplate.indexOf("}"));
|
||||
urlTemplate.indexOf("}"));
|
||||
String[] parsed = parseTransform(templateParam);
|
||||
lookupField = parsed[0];
|
||||
lookupTransform = parsed[1];
|
||||
@@ -204,13 +205,9 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
|
||||
}
|
||||
}
|
||||
// initialize response document parser
|
||||
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
|
||||
factory.setNamespaceAware(true);
|
||||
try {
|
||||
// disallow DTD parsing to ensure no XXE attacks can occur
|
||||
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
factory.setXIncludeAware(false);
|
||||
DocumentBuilderFactory factory = XMLUtils.getDocumentBuilderFactory();
|
||||
factory.setNamespaceAware(true);
|
||||
docBuilder = factory.newDocumentBuilder();
|
||||
} catch (ParserConfigurationException pcE) {
|
||||
log.error("caught exception: " + pcE);
|
||||
@@ -255,53 +252,50 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
|
||||
}
|
||||
|
||||
protected int callService(String value, Item item, StringBuilder resultSb) throws IOException {
|
||||
|
||||
String callUrl = urlTemplate.replaceAll("\\{" + templateParam + "\\}", value);
|
||||
CloseableHttpClient client = HttpClientBuilder.create().build();
|
||||
HttpGet req = new HttpGet(callUrl);
|
||||
for (Map.Entry<String, String> entry : headers.entrySet()) {
|
||||
req.addHeader(entry.getKey(), entry.getValue());
|
||||
}
|
||||
HttpResponse resp = client.execute(req);
|
||||
int status = Curator.CURATE_ERROR;
|
||||
int statusCode = resp.getStatusLine().getStatusCode();
|
||||
if (statusCode == HttpStatus.SC_OK) {
|
||||
HttpEntity entity = resp.getEntity();
|
||||
if (entity != null) {
|
||||
// boiler-plate handling taken from Apache 4.1 javadoc
|
||||
InputStream instream = entity.getContent();
|
||||
try {
|
||||
// This next line triggers a false-positive XXE warning from LGTM, even though we disallow DTD
|
||||
// parsing during initialization of docBuilder in init()
|
||||
Document doc = docBuilder.parse(instream); // lgtm [java/xxe]
|
||||
status = processResponse(doc, item, resultSb);
|
||||
} catch (SAXException saxE) {
|
||||
log.error("caught exception: " + saxE);
|
||||
resultSb.append(" unable to read response document");
|
||||
} catch (RuntimeException ex) {
|
||||
// In case of an unexpected exception you may want to abort
|
||||
// the HTTP request in order to shut down the underlying
|
||||
// connection and release it back to the connection manager.
|
||||
req.abort();
|
||||
log.error("caught exception: " + ex);
|
||||
throw ex;
|
||||
} finally {
|
||||
// Closing the input stream will trigger connection release
|
||||
instream.close();
|
||||
}
|
||||
// When HttpClient instance is no longer needed,
|
||||
// shut down the connection manager to ensure
|
||||
// immediate deallocation of all system resources
|
||||
client.close();
|
||||
} else {
|
||||
log.error(" obtained no valid service response");
|
||||
resultSb.append("no service response");
|
||||
try (CloseableHttpClient client = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
HttpGet req = new HttpGet(callUrl);
|
||||
for (Map.Entry<String, String> entry : headers.entrySet()) {
|
||||
req.addHeader(entry.getKey(), entry.getValue());
|
||||
}
|
||||
try (CloseableHttpResponse resp = client.execute(req)) {
|
||||
int status = Curator.CURATE_ERROR;
|
||||
int statusCode = resp.getStatusLine().getStatusCode();
|
||||
if (statusCode == HttpStatus.SC_OK) {
|
||||
HttpEntity entity = resp.getEntity();
|
||||
if (entity != null) {
|
||||
// boiler-plate handling taken from Apache 4.1 javadoc
|
||||
InputStream instream = entity.getContent();
|
||||
try {
|
||||
// This next line triggers a false-positive XXE warning from LGTM, even though
|
||||
// we disallow DTD parsing during initialization of docBuilder in init()
|
||||
Document doc = docBuilder.parse(instream); // lgtm [java/xxe]
|
||||
status = processResponse(doc, item, resultSb);
|
||||
} catch (SAXException saxE) {
|
||||
log.error("caught exception: " + saxE);
|
||||
resultSb.append(" unable to read response document");
|
||||
} catch (RuntimeException ex) {
|
||||
// In case of an unexpected exception you may want to abort
|
||||
// the HTTP request in order to shut down the underlying
|
||||
// connection and release it back to the connection manager.
|
||||
req.abort();
|
||||
log.error("caught exception: " + ex);
|
||||
throw ex;
|
||||
} finally {
|
||||
// Closing the input stream will trigger connection release
|
||||
instream.close();
|
||||
}
|
||||
} else {
|
||||
log.error(" obtained no valid service response");
|
||||
resultSb.append("no service response");
|
||||
}
|
||||
} else {
|
||||
log.error("service returned non-OK status: " + statusCode);
|
||||
resultSb.append("no service response");
|
||||
}
|
||||
return status;
|
||||
}
|
||||
} else {
|
||||
log.error("service returned non-OK status: " + statusCode);
|
||||
resultSb.append("no service response");
|
||||
}
|
||||
return status;
|
||||
}
|
||||
|
||||
protected int processResponse(Document doc, Item item, StringBuilder resultSb) throws IOException {
|
||||
|
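The `callService` rewrite above mainly switches to try-with-resources, so the `CloseableHttpClient` and `CloseableHttpResponse` are released even when response parsing fails. A condensed sketch of that pattern using the stock `HttpClientBuilder` (DSpace's own code obtains the client from `DSpaceHttpClientFactory` instead; `BodyHandler` is a hypothetical callback used only for illustration):

```java
import java.io.IOException;
import java.io.InputStream;

import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;

public final class HttpGetSketch {

    private HttpGetSketch() { }

    /** Hypothetical callback that consumes the response body. */
    public interface BodyHandler {
        void accept(InputStream in) throws IOException;
    }

    /**
     * Fetch a URL and hand the body to the handler. Both the client and the
     * response are closed by try-with-resources, even if the handler throws.
     */
    public static int fetch(String url, BodyHandler handler) throws IOException {
        try (CloseableHttpClient client = HttpClientBuilder.create().build();
             CloseableHttpResponse resp = client.execute(new HttpGet(url))) {
            int statusCode = resp.getStatusLine().getStatusCode();
            if (statusCode == HttpStatus.SC_OK && resp.getEntity() != null) {
                try (InputStream in = resp.getEntity().getContent()) {
                    handler.accept(in);
                }
            }
            return statusCode;
        }
    }
}
```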
@@ -12,12 +12,12 @@ import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
@@ -60,22 +60,20 @@ public class MicrosoftTranslator extends AbstractTranslator {
|
||||
String url = baseUrl + "?appId=" + apiKey;
|
||||
url += "&to=" + to + "&from=" + from + "&text=" + text;
|
||||
|
||||
try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
|
||||
try (CloseableHttpClient client = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
HttpGet hm = new HttpGet(url);
|
||||
HttpResponse httpResponse = client.execute(hm);
|
||||
log.debug("Response code from API call is " + httpResponse);
|
||||
|
||||
if (httpResponse.getStatusLine().getStatusCode() == 200) {
|
||||
String response = IOUtils.toString(httpResponse.getEntity().getContent(),
|
||||
StandardCharsets.ISO_8859_1);
|
||||
response = response
|
||||
.replaceAll("<string xmlns=\"http://schemas.microsoft.com/2003/10/Serialization/\">", "");
|
||||
response = response.replaceAll("</string>", "");
|
||||
translatedText = response;
|
||||
try (CloseableHttpResponse httpResponse = client.execute(hm)) {
|
||||
log.debug("Response code from API call is " + httpResponse);
|
||||
if (httpResponse.getStatusLine().getStatusCode() == 200) {
|
||||
String response = IOUtils.toString(httpResponse.getEntity().getContent(),
|
||||
StandardCharsets.ISO_8859_1);
|
||||
response = response
|
||||
.replaceAll("<string xmlns=\"http://schemas.microsoft.com/2003/10/Serialization/\">", "");
|
||||
response = response.replaceAll("</string>", "");
|
||||
translatedText = response;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return translatedText;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -24,6 +24,8 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.output.NullOutputStream;
|
||||
import org.dspace.app.util.DSpaceObjectUtilsImpl;
|
||||
import org.dspace.app.util.service.DSpaceObjectUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
@@ -35,6 +37,7 @@ import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
/**
|
||||
@@ -45,7 +48,9 @@ import org.dspace.utils.DSpace;
|
||||
public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
|
||||
|
||||
protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
|
||||
protected DSpaceObjectUtils dspaceObjectUtils = DSpaceServicesFactory.getInstance().getServiceManager()
|
||||
.getServiceByName(DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class);
|
||||
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
|
||||
protected Context context;
|
||||
private CurationClientOptions curationClientOptions;
|
||||
|
||||
@@ -345,9 +350,29 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
|
||||
|
||||
if (this.commandLine.hasOption('i')) {
|
||||
this.id = this.commandLine.getOptionValue('i').toLowerCase();
|
||||
DSpaceObject dso;
|
||||
if (!this.id.equalsIgnoreCase("all")) {
|
||||
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
|
||||
DSpaceObject dso;
|
||||
// First, try to parse the id as a UUID. If that fails, treat it as a handle.
|
||||
UUID uuid = null;
|
||||
try {
|
||||
uuid = UUID.fromString(id);
|
||||
} catch (Exception e) {
|
||||
// It's not a UUID, proceed to treat it as a handle.
|
||||
}
|
||||
if (uuid != null) {
|
||||
try {
|
||||
dso = dspaceObjectUtils.findDSpaceObject(context, uuid);
|
||||
if (dso != null) {
|
||||
// We already resolved an object, return early
|
||||
return;
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
String error = "SQLException trying to find dso with uuid " + uuid;
|
||||
super.handler.logError(error);
|
||||
throw new RuntimeException(error, e);
|
||||
}
|
||||
}
|
||||
// If we get here, the id is not a UUID, so we assume it's a handle.
|
||||
try {
|
||||
dso = handleService.resolveToObject(this.context, id);
|
||||
} catch (SQLException e) {
|
||||
|
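The `Curation` hunk above first tries to interpret the `-i` value as a UUID and only falls back to handle resolution when parsing fails. A small sketch of that decision, catching the `IllegalArgumentException` that `UUID.fromString` throws (the class name is illustrative):

```java
import java.util.UUID;

/** Sketch: decide whether a curation target id is a UUID or should be resolved as a handle. */
public final class TargetIdSketch {

    private TargetIdSketch() { }

    /** Returns the parsed UUID, or null when the id is not a UUID (e.g. a handle like "123456789/42"). */
    public static UUID tryParseUuid(String id) {
        try {
            return UUID.fromString(id);
        } catch (IllegalArgumentException e) {
            // Not a UUID; the caller falls back to handle resolution.
            return null;
        }
    }
}
```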
@@ -88,9 +88,11 @@ public class SolrSearchCore {
solrServer.setBaseURL(solrService);
solrServer.setUseMultiPartPost(true);
// Dummy/test query to search for Item (type=2) of ID=1
SolrQuery solrQuery = new SolrQuery()
.setQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE +
" AND " + SearchUtils.RESOURCE_ID_FIELD + ":1");
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery("*:*");
solrQuery.addFilterQuery(
SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE,
SearchUtils.RESOURCE_ID_FIELD + ":1");
// Only return obj identifier fields in result doc
solrQuery.setFields(SearchUtils.RESOURCE_TYPE_FIELD, SearchUtils.RESOURCE_ID_FIELD);
solrServer.query(solrQuery, REQUEST_METHOD);

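A sketch of the same probe query in isolation: filter queries keep the match-all query cheap and separately cacheable, while `setFields` keeps the response payload small. The field names and values here are placeholders, not DSpace's actual constants:

```java
import org.apache.solr.client.solrj.SolrQuery;

/** Sketch: a cheap connectivity probe against a Solr core. */
public final class SolrProbeSketch {

    private SolrProbeSketch() { }

    public static SolrQuery buildProbeQuery(String typeField, String idField) {
        SolrQuery solrQuery = new SolrQuery();
        // Match everything, then narrow with filter queries, which Solr caches separately
        solrQuery.setQuery("*:*");
        solrQuery.addFilterQuery(typeField + ":2", idField + ":1");
        // Keep the response tiny: identifier fields only, at most one row
        solrQuery.setFields(typeField, idField);
        solrQuery.setRows(1);
        return solrQuery;
    }
}
```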
@@ -52,21 +52,23 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin {
|
||||
List<Bitstream> bitstreams = bundle.getBitstreams();
|
||||
if (bitstreams != null) {
|
||||
for (Bitstream bitstream : bitstreams) {
|
||||
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName());
|
||||
// Add _keyword and _filter fields which are necessary to support filtering and faceting
|
||||
// for the file names
|
||||
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName());
|
||||
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName());
|
||||
if (bitstream != null) {
|
||||
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName());
|
||||
// Add _keyword and _filter fields which are necessary to
|
||||
// support filtering and faceting for the file names
|
||||
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName());
|
||||
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName());
|
||||
|
||||
String description = bitstream.getDescription();
|
||||
if ((description != null) && !description.isEmpty()) {
|
||||
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description);
|
||||
// Add _keyword and _filter fields which are necessary to support filtering and
|
||||
// faceting for the descriptions
|
||||
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword",
|
||||
description);
|
||||
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter",
|
||||
description);
|
||||
String description = bitstream.getDescription();
|
||||
if ((description != null) && !description.isEmpty()) {
|
||||
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description);
|
||||
// Add _keyword and _filter fields which are necessary to support filtering and
|
||||
// faceting for the descriptions
|
||||
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword",
|
||||
description);
|
||||
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter",
|
||||
description);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
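The `SolrServiceFileInfoPlugin` hunk above wraps each field write in a null/empty guard and always writes the `_keyword` and `_filter` copies alongside the base field. A sketch of that pattern as a helper (the suffix convention follows DSpace Discovery's schema; the helper name is illustrative):

```java
import org.apache.solr.common.SolrInputDocument;

/** Sketch: index a value together with the _keyword and _filter copies faceting relies on. */
public final class FacetFieldSketch {

    private FacetFieldSketch() { }

    public static void addWithVariants(SolrInputDocument document, String field, String value) {
        if (value == null || value.isEmpty()) {
            return; // nothing to index
        }
        document.addField(field, value);
        document.addField(field + "_keyword", value);
        document.addField(field + "_filter", value);
    }
}
```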
@@ -41,6 +41,9 @@ import org.apache.solr.client.solrj.SolrQuery;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.apache.solr.client.solrj.response.FacetField;
|
||||
import org.apache.solr.client.solrj.response.QueryResponse;
|
||||
import org.apache.solr.client.solrj.response.json.BucketBasedJsonFacet;
|
||||
import org.apache.solr.client.solrj.response.json.BucketJsonFacet;
|
||||
import org.apache.solr.client.solrj.response.json.NestableJsonFacet;
|
||||
import org.apache.solr.client.solrj.util.ClientUtils;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrDocumentList;
|
||||
@@ -72,6 +75,7 @@ import org.dspace.discovery.indexobject.IndexableCommunity;
|
||||
import org.dspace.discovery.indexobject.IndexableItem;
|
||||
import org.dspace.discovery.indexobject.factory.IndexFactory;
|
||||
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
|
||||
import org.dspace.discovery.indexobject.factory.ItemIndexFactory;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
@@ -341,6 +345,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
try {
|
||||
final List<IndexFactory> indexableObjectServices = indexObjectServiceFactory.
|
||||
getIndexFactories();
|
||||
int indexObject = 0;
|
||||
for (IndexFactory indexableObjectService : indexableObjectServices) {
|
||||
if (type == null || StringUtils.equals(indexableObjectService.getType(), type)) {
|
||||
final Iterator<IndexableObject> indexableObjects = indexableObjectService.findAll(context);
|
||||
@@ -348,6 +353,10 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
final IndexableObject indexableObject = indexableObjects.next();
|
||||
indexContent(context, indexableObject, force);
|
||||
context.uncacheEntity(indexableObject.getIndexedObject());
|
||||
indexObject++;
|
||||
if ((indexObject % 100) == 0 && indexableObjectService instanceof ItemIndexFactory) {
|
||||
context.uncacheEntities();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -852,16 +861,20 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
|
||||
solrQuery.setQuery(query);
|
||||
|
||||
// Add any search fields to our query. This is the limited list
|
||||
// of fields that will be returned in the solr result
|
||||
for (String fieldName : discoveryQuery.getSearchFields()) {
|
||||
solrQuery.addField(fieldName);
|
||||
if (discoveryQuery.getMaxResults() != 0) {
|
||||
// set search fields in Solr query only if we are interested in the actual search results
|
||||
|
||||
// Add any search fields to our query. This is the limited list
|
||||
// of fields that will be returned in the solr result
|
||||
for (String fieldName : discoveryQuery.getSearchFields()) {
|
||||
solrQuery.addField(fieldName);
|
||||
}
|
||||
// Also ensure a few key obj identifier fields are returned with every query
|
||||
solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD);
|
||||
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
|
||||
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
|
||||
solrQuery.addField(STATUS_FIELD);
|
||||
}
|
||||
// Also ensure a few key obj identifier fields are returned with every query
|
||||
solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD);
|
||||
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
|
||||
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
|
||||
solrQuery.addField(STATUS_FIELD);
|
||||
|
||||
if (discoveryQuery.isSpellCheck()) {
|
||||
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
|
||||
@@ -1055,8 +1068,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
//Resolve our facet field values
|
||||
resolveFacetFields(context, query, result, skipLoadingResponse, solrQueryResponse);
|
||||
//Add total entries count for metadata browsing
|
||||
resolveEntriesCount(result, solrQueryResponse);
|
||||
//Resolve our json facet field values used for metadata browsing
|
||||
resolveJsonFacetFields(context, result, solrQueryResponse);
|
||||
}
|
||||
// If any stale entries are found in the current page of results,
|
||||
// we remove those stale entries and rerun the same query again.
|
||||
@@ -1083,35 +1096,38 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores the total count of entries for metadata index browsing. The count is calculated by the
|
||||
* <code>json.facet</code> parameter with the following value:
|
||||
* Process the 'json.facet' response, which is currently only used for metadata browsing
|
||||
*
|
||||
* <pre><code>
|
||||
* {
|
||||
* "entries_count": {
|
||||
* "type": "terms",
|
||||
* "field": "facetNameField_filter",
|
||||
* "limit": 0,
|
||||
* "prefix": "prefix_value",
|
||||
* "numBuckets": true
|
||||
* }
|
||||
* }
|
||||
* </code></pre>
|
||||
*
|
||||
* This value is returned in the <code>facets</code> field of the Solr response.
|
||||
*
|
||||
* @param result DiscoverResult object where the total entries count will be stored
|
||||
* @param solrQueryResponse QueryResponse object containing the solr response
|
||||
* @param context context object
|
||||
* @param result the result object to add the facet results to
|
||||
* @param solrQueryResponse the solr query response
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
private void resolveEntriesCount(DiscoverResult result, QueryResponse solrQueryResponse) {
|
||||
private void resolveJsonFacetFields(Context context, DiscoverResult result, QueryResponse solrQueryResponse)
|
||||
throws SQLException {
|
||||
|
||||
Object facetsObj = solrQueryResponse.getResponse().get("facets");
|
||||
if (facetsObj instanceof NamedList) {
|
||||
NamedList<Object> facets = (NamedList<Object>) facetsObj;
|
||||
Object bucketsInfoObj = facets.get("entries_count");
|
||||
if (bucketsInfoObj instanceof NamedList) {
|
||||
NamedList<Object> bucketsInfo = (NamedList<Object>) bucketsInfoObj;
|
||||
result.setTotalEntries((int) bucketsInfo.get("numBuckets"));
|
||||
NestableJsonFacet response = solrQueryResponse.getJsonFacetingResponse();
|
||||
if (response != null && response.getBucketBasedFacetNames() != null) {
|
||||
for (String facetName : response.getBucketBasedFacetNames()) {
|
||||
BucketBasedJsonFacet facet = response.getBucketBasedFacets(facetName);
|
||||
if (facet != null) {
|
||||
result.setTotalEntries(facet.getNumBucketsCount());
|
||||
for (BucketJsonFacet bucket : facet.getBuckets()) {
|
||||
String facetValue = bucket.getVal() != null ? bucket.getVal().toString() : "";
|
||||
String field = facetName + "_filter";
|
||||
String displayedValue = transformDisplayedValue(context, field, facetValue);
|
||||
String authorityValue = transformAuthorityValue(context, field, facetValue);
|
||||
String sortValue = transformSortValue(context, field, facetValue);
|
||||
String filterValue = displayedValue;
|
||||
if (StringUtils.isNotBlank(authorityValue)) {
|
||||
filterValue = authorityValue;
|
||||
}
|
||||
result.addFacetResult(facetName,
|
||||
new DiscoverResult.FacetResult(filterValue, displayedValue,
|
||||
authorityValue, sortValue, bucket.getCount(),
|
||||
DiscoveryConfigurationParameters.TYPE_TEXT));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
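The `resolveJsonFacetFields` rewrite above moves from poking at the raw `NamedList` to SolrJ's typed `json.facet` API. A reduced sketch of walking that response; the transformation of displayed/authority/sort values is omitted, and printing stands in for populating a `DiscoverResult`:

```java
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.json.BucketBasedJsonFacet;
import org.apache.solr.client.solrj.response.json.BucketJsonFacet;
import org.apache.solr.client.solrj.response.json.NestableJsonFacet;

/** Sketch: walk the json.facet section of a Solr response with SolrJ's typed API. */
public final class JsonFacetSketch {

    private JsonFacetSketch() { }

    public static void printBuckets(QueryResponse solrQueryResponse) {
        NestableJsonFacet facets = solrQueryResponse.getJsonFacetingResponse();
        if (facets == null || facets.getBucketBasedFacetNames() == null) {
            return;
        }
        for (String facetName : facets.getBucketBasedFacetNames()) {
            BucketBasedJsonFacet facet = facets.getBucketBasedFacets(facetName);
            if (facet == null) {
                continue;
            }
            // numBuckets is only populated when the request asked for "numBuckets": true
            System.out.println(facetName + ": " + facet.getNumBucketsCount() + " distinct values");
            for (BucketJsonFacet bucket : facet.getBuckets()) {
                System.out.println("  " + bucket.getVal() + " -> " + bucket.getCount());
            }
        }
    }
}
```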
@@ -10,8 +10,10 @@ package org.dspace.discovery.configuration;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

@@ -197,15 +199,19 @@ public class DiscoveryConfigurationService {
}

/**
* @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet}
* Get the unique set of configured Discovery facets. This is used when inspecting configuration
* to include hierarchical vocabularies in the browse menu.
*
* @return All unique instances of {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet}
* included in "sidebarFacets" bean, across all Discovery configurations.
*/
public List<DiscoverySearchFilterFacet> getAllFacetsConfig() {
List<DiscoverySearchFilterFacet> configs = new ArrayList<>();
public List<DiscoverySearchFilterFacet> getAllUniqueFacetsConfig() {
Set<DiscoverySearchFilterFacet> configs = new LinkedHashSet<>();
for (String key : map.keySet()) {
DiscoveryConfiguration config = map.get(key);
configs.addAll(config.getSidebarFacets());
}
return configs;
return new ArrayList<>(configs);
}

public static void main(String[] args) {

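The switch from `ArrayList` to `LinkedHashSet` above drops facets that several configurations share while keeping first-seen order. The same idea in isolation, with a generic placeholder type instead of DSpace's facet bean:

```java
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/** Sketch: collect one instance of each configured facet across many configurations. */
public final class UniqueFacetsSketch {

    private UniqueFacetsSketch() { }

    public static <T> List<T> uniqueAcross(Map<String, List<T>> facetsByConfiguration) {
        // LinkedHashSet drops duplicates (the same bean referenced by several
        // configurations) while keeping the first-seen order stable.
        Set<T> unique = new LinkedHashSet<>();
        for (List<T> facets : facetsByConfiguration.values()) {
            unique.addAll(facets);
        }
        return new ArrayList<>(unique);
    }
}
```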
@@ -302,7 +302,7 @@ public class DiscoverQueryBuilder implements InitializingBean {

if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new SearchServiceException(
"The field: " + sortBy + "is not configured for the configuration!");
"The field: " + sortBy + " is not configured for the configuration!");
}

@@ -139,8 +139,8 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini

//Load enabled collections
String[] citationEnabledCollections = configurationService
.getArrayProperty("citation-page.enabled_collections");
citationEnabledCollectionsList = Arrays.asList(citationEnabledCollections);
.getArrayProperty("citation-page.enabled_collections");
citationEnabledCollectionsList = new ArrayList<String>(Arrays.asList(citationEnabledCollections));

//Load enabled communities, and add to collection-list
String[] citationEnabledCommunities = configurationService

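The change above matters because `Arrays.asList` returns a fixed-size view of the array, so appending the community-derived collections to it later would fail. A tiny demonstration of the difference (the handle strings are made up):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FixedSizeListSketch {
    public static void main(String[] args) {
        String[] collections = {"123456789/10", "123456789/11"};

        List<String> fixed = Arrays.asList(collections);
        // fixed.add("123456789/12");   // would throw UnsupportedOperationException

        List<String> growable = new ArrayList<>(Arrays.asList(collections));
        growable.add("123456789/12");   // fine: the ArrayList copy is mutable
        System.out.println(growable);
    }
}
```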
@@ -17,15 +17,15 @@ import java.util.regex.Pattern;
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.NameValuePair;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.entity.UrlEncodedFormEntity;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.message.BasicNameValuePair;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.eperson.service.CaptchaService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
@@ -82,18 +82,17 @@ public class CaptchaServiceImpl implements CaptchaService {
|
||||
throw new RuntimeException(e.getMessage(), e);
|
||||
}
|
||||
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpResponse httpResponse;
|
||||
GoogleCaptchaResponse googleResponse;
|
||||
final ObjectMapper objectMapper = new ObjectMapper();
|
||||
try {
|
||||
httpResponse = httpClient.execute(httpPost);
|
||||
googleResponse = objectMapper.readValue(httpResponse.getEntity().getContent(), GoogleCaptchaResponse.class);
|
||||
try (CloseableHttpClient httpClient = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
final ObjectMapper objectMapper = new ObjectMapper();
|
||||
try (CloseableHttpResponse httpResponse = httpClient.execute(httpPost)) {
|
||||
GoogleCaptchaResponse googleResponse = objectMapper.readValue(httpResponse.getEntity().getContent(),
|
||||
GoogleCaptchaResponse.class);
|
||||
validateGoogleResponse(googleResponse, action);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
throw new RuntimeException("Error during verify google recaptcha site", e);
|
||||
}
|
||||
validateGoogleResponse(googleResponse, action);
|
||||
}
|
||||
|
||||
private boolean responseSanityCheck(String response) {
|
||||
|
@@ -373,7 +373,7 @@ public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport {

@Override
public String getName() {
return getEmail();
return this.getFullName();
}

String getDigestAlgorithm() {

@@ -131,7 +131,8 @@ public class SubscribeServiceImpl implements SubscribeService {

@Override
public boolean isSubscribed(Context context, EPerson eperson, DSpaceObject dSpaceObject) throws SQLException {
return subscriptionDAO.findByEPersonAndDso(context, eperson, dSpaceObject, -1, -1) != null;
List<Subscription> subscriptions = subscriptionDAO.findByEPersonAndDso(context, eperson, dSpaceObject, -1, -1);
return subscriptions != null && !subscriptions.isEmpty();
}

@Override

@@ -28,13 +28,13 @@ import org.apache.http.HttpStatus;
|
||||
import org.apache.http.NameValuePair;
|
||||
import org.apache.http.NoHttpResponseException;
|
||||
import org.apache.http.StatusLine;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.entity.UrlEncodedFormEntity;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.message.BasicNameValuePair;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.app.util.Util;
|
||||
import org.json.JSONObject;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
@@ -121,33 +121,34 @@ public class OpenAIRERestConnector {
|
||||
params.add(new BasicNameValuePair("grant_type", "client_credentials"));
|
||||
httpPost.setEntity(new UrlEncodedFormEntity(params, "UTF-8"));
|
||||
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpResponse getResponse = httpClient.execute(httpPost);
|
||||
try (CloseableHttpClient httpClient = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
HttpResponse getResponse = httpClient.execute(httpPost);
|
||||
|
||||
JSONObject responseObject = null;
|
||||
try (InputStream is = getResponse.getEntity().getContent();
|
||||
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
|
||||
String inputStr;
|
||||
// verify if we have basic json
|
||||
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
|
||||
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")
|
||||
&& inputStr.contains("expires_in")) {
|
||||
try {
|
||||
responseObject = new JSONObject(inputStr);
|
||||
} catch (Exception e) {
|
||||
// Not as valid as I'd hoped, move along
|
||||
responseObject = null;
|
||||
JSONObject responseObject = null;
|
||||
try (InputStream is = getResponse.getEntity().getContent();
|
||||
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
|
||||
String inputStr;
|
||||
// verify if we have basic json
|
||||
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
|
||||
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")
|
||||
&& inputStr.contains("expires_in")) {
|
||||
try {
|
||||
responseObject = new JSONObject(inputStr);
|
||||
} catch (Exception e) {
|
||||
// Not as valid as I'd hoped, move along
|
||||
responseObject = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (responseObject == null || !responseObject.has("access_token") || !responseObject.has("expires_in")) {
|
||||
throw new IOException("Unable to grab the access token using provided service url, client id and secret");
|
||||
}
|
||||
|
||||
return new OpenAIRERestToken(responseObject.get("access_token").toString(),
|
||||
Long.valueOf(responseObject.get("expires_in").toString()));
|
||||
if (responseObject == null || !responseObject.has("access_token") || !responseObject.has("expires_in")) {
|
||||
throw new IOException("Unable to grab the access token using provided service url, " +
|
||||
"client id and secret");
|
||||
}
|
||||
|
||||
return new OpenAIRERestToken(responseObject.get("access_token").toString(),
|
||||
Long.valueOf(responseObject.get("expires_in").toString()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -172,42 +173,43 @@ public class OpenAIRERestConnector {
|
||||
httpGet.addHeader("Authorization", "Bearer " + accessToken);
|
||||
}
|
||||
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
getResponse = httpClient.execute(httpGet);
|
||||
try (CloseableHttpClient httpClient = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
getResponse = httpClient.execute(httpGet);
|
||||
|
||||
StatusLine status = getResponse.getStatusLine();
|
||||
StatusLine status = getResponse.getStatusLine();
|
||||
|
||||
// registering errors
|
||||
switch (status.getStatusCode()) {
|
||||
case HttpStatus.SC_NOT_FOUND:
|
||||
// 404 - Not found
|
||||
case HttpStatus.SC_FORBIDDEN:
|
||||
// 403 - Invalid Access Token
|
||||
case 429:
|
||||
// 429 - Rate limit abuse for unauthenticated user
|
||||
Header[] limitUsed = getResponse.getHeaders("x-ratelimit-used");
|
||||
Header[] limitMax = getResponse.getHeaders("x-ratelimit-limit");
|
||||
// registering errors
|
||||
switch (status.getStatusCode()) {
|
||||
case HttpStatus.SC_NOT_FOUND:
|
||||
// 404 - Not found
|
||||
case HttpStatus.SC_FORBIDDEN:
|
||||
// 403 - Invalid Access Token
|
||||
case 429:
|
||||
// 429 - Rate limit abuse for unauthenticated user
|
||||
Header[] limitUsed = getResponse.getHeaders("x-ratelimit-used");
|
||||
Header[] limitMax = getResponse.getHeaders("x-ratelimit-limit");
|
||||
|
||||
if (limitUsed.length > 0) {
|
||||
String limitMsg = limitUsed[0].getValue();
|
||||
if (limitMax.length > 0) {
|
||||
limitMsg = limitMsg.concat(" of " + limitMax[0].getValue());
|
||||
if (limitUsed.length > 0) {
|
||||
String limitMsg = limitUsed[0].getValue();
|
||||
if (limitMax.length > 0) {
|
||||
limitMsg = limitMsg.concat(" of " + limitMax[0].getValue());
|
||||
}
|
||||
getGotError(new NoHttpResponseException(status.getReasonPhrase() + " with usage limit "
|
||||
+ limitMsg),
|
||||
url + '/' + file);
|
||||
} else {
|
||||
// 429 - Rate limit abuse
|
||||
getGotError(new NoHttpResponseException(status.getReasonPhrase()), url + '/' + file);
|
||||
}
|
||||
getGotError(
|
||||
new NoHttpResponseException(status.getReasonPhrase() + " with usage limit " + limitMsg),
|
||||
url + '/' + file);
|
||||
} else {
|
||||
// 429 - Rate limit abuse
|
||||
getGotError(new NoHttpResponseException(status.getReasonPhrase()), url + '/' + file);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// 200 or other
|
||||
break;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// 200 or other
|
||||
break;
|
||||
}
|
||||
|
||||
// do not close this httpClient
|
||||
result = getResponse.getEntity().getContent();
|
||||
// do not close this httpClient
|
||||
result = getResponse.getEntity().getContent();
|
||||
}
|
||||
} catch (MalformedURLException e1) {
|
||||
getGotError(e1, url + '/' + file);
|
||||
} catch (Exception e) {
|
||||
|
@@ -7,17 +7,18 @@
|
||||
*/
|
||||
package org.dspace.external;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.InputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Scanner;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
|
||||
/**
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
@@ -39,7 +40,7 @@ public class OrcidRestConnector {
|
||||
}
|
||||
|
||||
public InputStream get(String path, String accessToken) {
|
||||
HttpResponse getResponse = null;
|
||||
CloseableHttpResponse getResponse = null;
|
||||
InputStream result = null;
|
||||
path = trimSlashes(path);
|
||||
|
||||
@@ -49,11 +50,13 @@ public class OrcidRestConnector {
|
||||
httpGet.addHeader("Content-Type", "application/vnd.orcid+xml");
|
||||
httpGet.addHeader("Authorization","Bearer " + accessToken);
|
||||
}
|
||||
try {
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
try (CloseableHttpClient httpClient = DSpaceHttpClientFactory.getInstance().build()) {
|
||||
getResponse = httpClient.execute(httpGet);
|
||||
//do not close this httpClient
|
||||
result = getResponse.getEntity().getContent();
|
||||
try (InputStream responseStream = getResponse.getEntity().getContent()) {
|
||||
// Read all the content of the response stream into a byte array to prevent TruncatedChunkException
|
||||
byte[] content = responseStream.readAllBytes();
|
||||
result = new ByteArrayInputStream(content);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
getGotError(e, fullPath);
|
||||
}
|
||||
|
@@ -7,24 +7,17 @@
|
||||
*/
|
||||
package org.dspace.external.provider.impl;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
@@ -32,8 +25,9 @@ import org.dspace.external.OrcidRestConnector;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.AbstractExternalDataProvider;
|
||||
import org.dspace.external.provider.orcid.xml.XMLtoBio;
|
||||
import org.json.JSONObject;
|
||||
import org.dspace.orcid.model.factory.OrcidFactoryUtils;
|
||||
import org.orcid.jaxb.model.v3.release.common.OrcidIdentifier;
|
||||
import org.orcid.jaxb.model.v3.release.record.Email;
|
||||
import org.orcid.jaxb.model.v3.release.record.Person;
|
||||
import org.orcid.jaxb.model.v3.release.search.Result;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
@@ -60,6 +54,11 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
|
||||
private XMLtoBio converter;
|
||||
|
||||
/**
|
||||
* Maximum retries to allow for the access token retrieval
|
||||
*/
|
||||
private int maxClientRetries = 3;
|
||||
|
||||
public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";
|
||||
private static final int MAX_INDEX = 10000;
|
||||
|
||||
@@ -78,47 +77,37 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
* @throws java.io.IOException passed through from HTTPclient.
|
||||
*/
|
||||
public void init() throws IOException {
|
||||
if (StringUtils.isNotBlank(clientSecret) && StringUtils.isNotBlank(clientId)
|
||||
&& StringUtils.isNotBlank(OAUTHUrl)) {
|
||||
String authenticationParameters = "?client_id=" + clientId +
|
||||
"&client_secret=" + clientSecret +
|
||||
"&scope=/read-public&grant_type=client_credentials";
|
||||
HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
|
||||
httpPost.addHeader("Accept", "application/json");
|
||||
httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");
|
||||
// Initialize access token at spring instantiation. If it fails, the access token will be null rather
|
||||
// than causing a fatal Spring startup error
|
||||
initializeAccessToken();
|
||||
}
|
||||
|
||||
HttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpResponse getResponse = httpClient.execute(httpPost);
|
||||
|
||||
JSONObject responseObject = null;
|
||||
try (InputStream is = getResponse.getEntity().getContent();
|
||||
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
|
||||
String inputStr;
|
||||
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
|
||||
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
|
||||
try {
|
||||
responseObject = new JSONObject(inputStr);
|
||||
} catch (Exception e) {
|
||||
//Not as valid as I'd hoped, move along
|
||||
responseObject = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (responseObject != null && responseObject.has("access_token")) {
|
||||
accessToken = (String) responseObject.get("access_token");
|
||||
}
|
||||
/**
* Initialize access token, logging an error and decrementing remaining retries if an IOException is thrown.
* If the optional access token result is empty, set to null instead.
*/
public void initializeAccessToken() {
// If we have reached max retries or the access token is already set, return immediately
if (maxClientRetries <= 0 || StringUtils.isNotBlank(accessToken)) {
return;
}
try {
accessToken = OrcidFactoryUtils.retrieveAccessToken(clientId, clientSecret, OAUTHUrl).orElse(null);
} catch (IOException e) {
log.error("Error retrieving ORCID access token, {} retries left", --maxClientRetries);
}
}
|
||||
@Override
|
||||
public Optional<ExternalDataObject> getExternalDataObject(String id) {
|
||||
initializeAccessToken();
|
||||
Person person = getBio(id);
|
||||
ExternalDataObject externalDataObject = convertToExternalDataObject(person);
|
||||
return Optional.of(externalDataObject);
|
||||
}
|
||||
|
||||
protected ExternalDataObject convertToExternalDataObject(Person person) {
|
||||
initializeAccessToken();
|
||||
ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier);
|
||||
if (person.getName() != null) {
|
||||
String lastName = "";
|
||||
@@ -126,13 +115,20 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
if (person.getName().getFamilyName() != null) {
|
||||
lastName = person.getName().getFamilyName().getContent();
|
||||
externalDataObject.addMetadata(new MetadataValueDTO("person", "familyName", null, null,
|
||||
lastName));
|
||||
lastName));
|
||||
}
|
||||
if (person.getName().getGivenNames() != null) {
|
||||
firstName = person.getName().getGivenNames().getContent();
|
||||
externalDataObject.addMetadata(new MetadataValueDTO("person", "givenName", null, null,
|
||||
firstName));
|
||||
|
||||
firstName));
|
||||
}
|
||||
if (person.getEmails().getEmails() != null && !person.getEmails().getEmails().isEmpty()) {
|
||||
Email email = person.getEmails().getEmails().get(0);
|
||||
if (person.getEmails().getEmails().size() > 1) {
|
||||
email = person.getEmails().getEmails().stream().filter(Email::isPrimary).findFirst().orElse(email);
|
||||
}
|
||||
externalDataObject.addMetadata(new MetadataValueDTO("person", "email", null,
|
||||
null, email.getEmail()));
|
||||
}
|
||||
externalDataObject.setId(person.getName().getPath());
|
||||
externalDataObject
|
||||
@@ -140,7 +136,7 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath()));
|
||||
externalDataObject
|
||||
.addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null,
|
||||
orcidUrl + "/" + person.getName().getPath()));
|
||||
orcidUrl + "/" + person.getName().getPath()));
|
||||
if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) {
|
||||
externalDataObject.setDisplayValue(lastName + ", " + firstName);
|
||||
externalDataObject.setValue(lastName + ", " + firstName);
|
||||
@@ -151,8 +147,8 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
externalDataObject.setDisplayValue(firstName);
|
||||
externalDataObject.setValue(firstName);
|
||||
}
|
||||
} else if (person.getPath() != null ) {
|
||||
externalDataObject.setId(StringUtils.substringBetween(person.getPath(),"/","/person"));
|
||||
} else if (person.getPath() != null) {
|
||||
externalDataObject.setId(StringUtils.substringBetween(person.getPath(), "/", "/person"));
|
||||
}
|
||||
return externalDataObject;
|
||||
}
|
||||
@@ -167,14 +163,13 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
if (!isValid(id)) {
|
||||
return null;
|
||||
}
|
||||
InputStream bioDocument = orcidRestConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
|
||||
Person person = converter.convertSinglePerson(bioDocument);
|
||||
try {
|
||||
bioDocument.close();
|
||||
} catch (IOException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
if (orcidRestConnector == null) {
|
||||
log.error("ORCID REST connector is null, returning null ORCID Person Bio");
|
||||
return null;
|
||||
}
|
||||
return person;
|
||||
initializeAccessToken();
|
||||
InputStream bioDocument = orcidRestConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
|
||||
return converter.convertSinglePerson(bioDocument);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -188,12 +183,18 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
|
||||
@Override
|
||||
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
|
||||
initializeAccessToken();
|
||||
if (limit > 100) {
|
||||
throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
|
||||
}
|
||||
if (start > MAX_INDEX) {
|
||||
throw new IllegalArgumentException("The starting number of results to retrieve cannot exceed 10000.");
|
||||
}
|
||||
// Check REST connector is initialized
|
||||
if (orcidRestConnector == null) {
|
||||
log.error("ORCID REST connector is not initialized, returning empty list");
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
String searchPath = "search?q=" + URLEncoder.encode(query, StandardCharsets.UTF_8)
|
||||
+ "&start=" + start
|
||||
@@ -205,7 +206,7 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
for (Result result : results) {
|
||||
OrcidIdentifier orcidIdentifier = result.getOrcidIdentifier();
|
||||
if (orcidIdentifier != null) {
|
||||
log.debug("Found OrcidId=" + orcidIdentifier.toString());
|
||||
log.debug("Found OrcidId=" + orcidIdentifier.getPath());
|
||||
String orcid = orcidIdentifier.getPath();
|
||||
Person bio = getBio(orcid);
|
||||
if (bio != null) {
|
||||
@@ -213,14 +214,6 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
}
|
||||
}
|
||||
}
|
||||
try {
|
||||
bioDocument.close();
|
||||
} catch (IOException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
}
|
||||
if (Objects.isNull(bios)) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
return bios.stream().map(bio -> convertToExternalDataObject(bio)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@@ -231,6 +224,11 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
|
||||
|
||||
@Override
|
||||
public int getNumberOfResults(String query) {
|
||||
if (orcidRestConnector == null) {
|
||||
log.error("ORCID REST connector is null, returning 0");
|
||||
return 0;
|
||||
}
|
||||
initializeAccessToken();
|
||||
String searchPath = "search?q=" + URLEncoder.encode(query, StandardCharsets.UTF_8)
|
||||
+ "&start=" + 0
|
||||
+ "&rows=" + 0;
|
||||
|
@@ -97,7 +97,7 @@ public class SHERPAv2JournalDataProvider extends AbstractExternalDataProvider {
|
||||
if (CollectionUtils.isNotEmpty(sherpaJournal.getIssns())) {
|
||||
String issn = sherpaJournal.getIssns().get(0);
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "identifier", "issn", null, issn));
|
||||
"creativeworkseries", "issn", null, null, issn));
|
||||
|
||||
}
|
||||
|
||||
|
@@ -106,8 +106,7 @@ public class SHERPAv2JournalISSNDataProvider extends AbstractExternalDataProvide
|
||||
String issn = sherpaJournal.getIssns().get(0);
|
||||
externalDataObject.setId(issn);
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "identifier", "issn", null, issn));
|
||||
|
||||
"creativeworkseries", "issn", null, null, issn));
|
||||
}
|
||||
|
||||
log.debug("New external data object. Title=" + externalDataObject.getValue() + ". ID="
|
||||
|
@@ -16,6 +16,7 @@ import javax.xml.stream.XMLInputFactory;
|
||||
import javax.xml.stream.XMLStreamException;
|
||||
import javax.xml.stream.XMLStreamReader;
|
||||
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
@@ -31,9 +32,7 @@ public abstract class Converter<T> {
|
||||
|
||||
protected Object unmarshall(InputStream input, Class<?> type) throws SAXException, URISyntaxException {
|
||||
try {
|
||||
XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
|
||||
// disallow DTD parsing to ensure no XXE attacks can occur
|
||||
xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
|
||||
XMLInputFactory xmlInputFactory = XMLUtils.getXMLInputFactory();
|
||||
XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(input);
|
||||
|
||||
JAXBContext context = JAXBContext.newInstance(type);
|
||||
|
@@ -21,9 +21,9 @@ import org.apache.http.client.entity.UrlEncodedFormEntity;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.apache.http.message.BasicNameValuePair;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.service.ClientInfoService;
|
||||
@@ -56,7 +56,7 @@ public class GoogleRecorderEventListener extends AbstractUsageEventListener {
|
||||
|
||||
public GoogleRecorderEventListener() {
|
||||
// httpclient is threadsafe so we only need one.
|
||||
httpclient = HttpClients.createDefault();
|
||||
httpclient = DSpaceHttpClientFactory.getInstance().build();
|
||||
}
|
||||
|
||||
@Autowired
|
||||
|
@@ -18,7 +18,7 @@ import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.dspace.app.client.DSpaceHttpClientFactory;
|
||||
import org.dspace.google.GoogleAnalyticsEvent;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -42,7 +42,7 @@ public class GoogleAnalyticsClientImpl implements GoogleAnalyticsClient {
|
||||
public GoogleAnalyticsClientImpl(String keyPrefix, GoogleAnalyticsClientRequestBuilder requestBuilder) {
|
||||
this.keyPrefix = keyPrefix;
|
||||
this.requestBuilder = requestBuilder;
|
||||
this.httpclient = HttpClients.createDefault();
|
||||
this.httpclient = DSpaceHttpClientFactory.getInstance().build();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -50,26 +50,26 @@ public class UserCheck extends Check {
|
||||
info.put("Self registered", 0);
|
||||
|
||||
for (EPerson e : epersons) {
|
||||
if (e.getEmail() != null && e.getEmail().length() > 0) {
|
||||
if (e.getEmail() != null && !e.getEmail().isEmpty()) {
|
||||
info.put("Have email", info.get("Have email") + 1);
|
||||
}
|
||||
if (e.canLogIn()) {
|
||||
info.put("Can log in (password)",
|
||||
info.get("Can log in (password)") + 1);
|
||||
}
|
||||
if (e.getFirstName() != null && e.getFirstName().length() > 0) {
|
||||
if (e.getFirstName() != null && !e.getFirstName().isEmpty()) {
|
||||
info.put("Have 1st name", info.get("Have 1st name") + 1);
|
||||
}
|
||||
if (e.getLastName() != null && e.getLastName().length() > 0) {
|
||||
if (e.getLastName() != null && !e.getLastName().isEmpty()) {
|
||||
info.put("Have 2nd name", info.get("Have 2nd name") + 1);
|
||||
}
|
||||
if (e.getLanguage() != null && e.getLanguage().length() > 0) {
|
||||
if (e.getLanguage() != null && !e.getLanguage().isEmpty()) {
|
||||
info.put("Have lang", info.get("Have lang") + 1);
|
||||
}
|
||||
if (e.getNetid() != null && e.getNetid().length() > 0) {
|
||||
if (e.getNetid() != null && !e.getNetid().isEmpty()) {
|
||||
info.put("Have netid", info.get("Have netid") + 1);
|
||||
}
|
||||
if (e.getNetid() != null && e.getNetid().length() > 0) {
|
||||
if (e.getNetid() != null && !e.getNetid().isEmpty()) {
|
||||
info.put("Self registered", info.get("Self registered") + 1);
|
||||
}
|
||||
}
|
||||
|
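The `UserCheck` hunk above swaps `length() > 0` checks for `isEmpty()`; the combined null-and-empty test is the same idea Commons Lang packages as `StringUtils.isNotEmpty`. A small sketch of the tallying pattern with that helper (class and method names are illustrative):

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;

/** Sketch: tally how many accounts have a given profile field filled in. */
public final class ProfileStatsSketch {

    private ProfileStatsSketch() { }

    public static void countIfPresent(Map<String, Integer> info, String key, String value) {
        // StringUtils.isNotEmpty(value) == (value != null && !value.isEmpty())
        if (StringUtils.isNotEmpty(value)) {
            info.merge(key, 1, Integer::sum);
        }
    }

    public static void main(String[] args) {
        Map<String, Integer> info = new HashMap<>();
        info.put("Have email", 0);
        countIfPresent(info, "Have email", "someone@example.org");
        countIfPresent(info, "Have email", null);
        System.out.println(info); // {Have email=1}
    }
}
```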
@@ -15,11 +15,14 @@ import java.util.List;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.DSpaceObjectService;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogHelper;
|
||||
@@ -64,9 +67,6 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
|
||||
@Autowired(required = true)
|
||||
private HandleService handleService;
|
||||
|
||||
@Autowired(required = true)
|
||||
private ItemService itemService;
|
||||
|
||||
/**
|
||||
* After all the properties are set check that the versioning is enabled
|
||||
*
|
||||
@@ -173,6 +173,16 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
|
||||
throw new RuntimeException("The current user is not authorized to change this item.", ex);
|
||||
}
|
||||
}
|
||||
if (dso instanceof Collection || dso instanceof Community) {
|
||||
try {
|
||||
// Update the metadata with the handle for collections and communities.
|
||||
modifyHandleMetadata(context, dso, getCanonical(id));
|
||||
} catch (SQLException ex) {
|
||||
throw new RuntimeException("A problem with the database connection occured.", ex);
|
||||
} catch (AuthorizeException ex) {
|
||||
throw new RuntimeException("The current user is not authorized to change this item.", ex);
|
||||
}
|
||||
}
|
||||
|
||||
return id;
|
||||
}
|
||||
@@ -491,27 +501,29 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
|
||||
* Remove all handles from an item's metadata and add the supplied handle instead.
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param item which item to modify
|
||||
* @param dso which dso to modify
|
||||
* @param handle which handle to add
|
||||
* @throws SQLException if database error
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
protected void modifyHandleMetadata(Context context, Item item, String handle)
|
||||
protected void modifyHandleMetadata(Context context, DSpaceObject dso, String handle)
|
||||
throws SQLException, AuthorizeException {
|
||||
// we want to exchange the old handle against the new one. To do so, we
|
||||
// load all identifiers, clear the metadata field, re add all
|
||||
// identifiers which are not from type handle and add the new handle.
|
||||
String handleref = handleService.getCanonicalForm(handle);
|
||||
List<MetadataValue> identifiers = itemService
|
||||
.getMetadata(item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY);
|
||||
itemService.clearMetadata(context, item, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY);
|
||||
DSpaceObjectService<DSpaceObject> dSpaceObjectService =
|
||||
ContentServiceFactory.getInstance().getDSpaceObjectService(dso);
|
||||
List<MetadataValue> identifiers = dSpaceObjectService
|
||||
.getMetadata(dso, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY);
|
||||
dSpaceObjectService.clearMetadata(context, dso, MetadataSchemaEnum.DC.getName(), "identifier", "uri", Item.ANY);
|
||||
for (MetadataValue identifier : identifiers) {
|
||||
if (this.supports(identifier.getValue())) {
|
||||
// ignore handles
|
||||
continue;
|
||||
}
|
||||
itemService.addMetadata(context,
|
||||
item,
|
||||
dSpaceObjectService.addMetadata(context,
|
||||
dso,
|
||||
identifier.getMetadataField(),
|
||||
identifier.getLanguage(),
|
||||
identifier.getValue(),
|
||||
@@ -519,9 +531,9 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
|
||||
identifier.getConfidence());
|
||||
}
|
||||
if (!StringUtils.isEmpty(handleref)) {
|
||||
itemService.addMetadata(context, item, MetadataSchemaEnum.DC.getName(),
|
||||
dSpaceObjectService.addMetadata(context, dso, MetadataSchemaEnum.DC.getName(),
|
||||
"identifier", "uri", null, handleref);
|
||||
}
|
||||
itemService.update(context, item);
|
||||
dSpaceObjectService.update(context, dso);
|
||||
}
|
||||
}
|
||||
|
@@ -448,28 +448,28 @@ public class DOIOrganiser {
|
||||
+ " is successfully registered.");
|
||||
}
|
||||
} catch (IdentifierException ex) {
|
||||
String message;
|
||||
if (!(ex instanceof DOIIdentifierException)) {
|
||||
LOG.error("It wasn't possible to register this identifier: "
|
||||
+ DOI.SCHEME + doiRow.getDoi()
|
||||
+ " online. ", ex);
|
||||
message = "It wasn't possible to register this identifier: "
|
||||
+ DOI.SCHEME + doiRow.getDoi()
|
||||
+ " online. ";
|
||||
} else {
|
||||
DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;
|
||||
message = "It wasn't possible to register this identifier : "
|
||||
+ DOI.SCHEME + doiRow.getDoi()
|
||||
+ " online. Exceptions code: "
|
||||
                    + DOIIdentifierException.codeToString(doiIdentifierException.getCode());
            }

            DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;

            try {
                sendAlertMail("Register", dso,
                              DOI.SCHEME + doiRow.getDoi(),
                              doiIdentifierException.codeToString(doiIdentifierException
                                  .getCode()));
                              message);
            } catch (IOException ioe) {
                LOG.error("Couldn't send mail", ioe);
            }

            LOG.error("It wasn't possible to register this identifier : "
                          + DOI.SCHEME + doiRow.getDoi()
                          + " online. Exceptions code: "
                          + doiIdentifierException
                              .codeToString(doiIdentifierException.getCode()), ex);
            LOG.error(message, ex);

            if (!quiet) {
                System.err.println("It wasn't possible to register this identifier: "
@@ -541,27 +541,27 @@ public class DOIOrganiser {
                System.out.println("This identifier : " + DOI.SCHEME + doiRow.getDoi() + " is successfully reserved.");
            }
        } catch (IdentifierException ex) {
            String message;
            if (!(ex instanceof DOIIdentifierException)) {
                LOG.error("It wasn't possible to register this identifier : "
                              + DOI.SCHEME + doiRow.getDoi()
                              + " online. ", ex);
                message = "It wasn't possible to register this identifier : "
                              + DOI.SCHEME + doiRow.getDoi()
                              + " online. ";
            } else {
                DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;
                message = "It wasn't possible to reserve the identifier online. "
                              + " Exceptions code: "
                              + DOIIdentifierException.codeToString(doiIdentifierException.getCode());
            }

            DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;

            try {
                sendAlertMail("Reserve", dso,
                              DOI.SCHEME + doiRow.getDoi(),
                              DOIIdentifierException.codeToString(
                                  doiIdentifierException.getCode()));
                              message);
            } catch (IOException ioe) {
                LOG.error("Couldn't send mail", ioe);
            }

            LOG.error("It wasn't possible to reserve the identifier online. "
                          + " Exceptions code: "
                          + DOIIdentifierException
                              .codeToString(doiIdentifierException.getCode()), ex);
            LOG.error(message, ex);

            if (!quiet) {
                System.err.println("It wasn't possible to reserve this identifier: " + DOI.SCHEME + doiRow.getDoi());
@@ -606,27 +606,27 @@ public class DOIOrganiser {
                    + doiRow.getDoi() + ".");
            }
        } catch (IdentifierException ex) {
            String message;
            if (!(ex instanceof DOIIdentifierException)) {
                LOG.error("Registering DOI {} for object {}: the registrar returned an error.",
                          doiRow.getDoi(), dso.getID(), ex);
                message = String.format("Registering DOI %s for object %s: the registrar returned an error.",
                                        doiRow.getDoi(), dso.getID());
            } else {
                DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;
                message = "It wasn't possible to update this identifier: "
                              + DOI.SCHEME + doiRow.getDoi()
                              + " Exceptions code: "
                              + DOIIdentifierException.codeToString(doiIdentifierException.getCode());
            }

            DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;

            try {
                sendAlertMail("Update", dso,
                              DOI.SCHEME + doiRow.getDoi(),
                              doiIdentifierException.codeToString(doiIdentifierException
                                  .getCode()));
                              message);
            } catch (IOException ioe) {
                LOG.error("Couldn't send mail", ioe);
            }

            LOG.error("It wasn't possible to update this identifier: "
                          + DOI.SCHEME + doiRow.getDoi()
                          + " Exceptions code: "
                          + doiIdentifierException
                              .codeToString(doiIdentifierException.getCode()), ex);
            LOG.error(message, ex);

            if (!quiet) {
                System.err.println("It wasn't possible to update this identifier: " + DOI.SCHEME + doiRow.getDoi());
@@ -830,4 +830,4 @@ public class DOIOrganiser {
        this.quiet = true;
    }

}
}
@@ -36,8 +36,9 @@ import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;
import org.dspace.app.client.DSpaceHttpClientFactory;
import org.dspace.app.util.XMLUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
@@ -722,7 +723,7 @@ public class DataCiteConnector
        httpContext.setCredentialsProvider(credentialsProvider);

        HttpEntity entity = null;
        try ( CloseableHttpClient httpclient = HttpClientBuilder.create().build(); ) {
        try (CloseableHttpClient httpclient = DSpaceHttpClientFactory.getInstance().build()) {
            HttpResponse response = httpclient.execute(req, httpContext);

            StatusLine status = response.getStatusLine();
@@ -832,7 +833,7 @@ public class DataCiteConnector
        }

        // parse the XML
        SAXBuilder saxBuilder = new SAXBuilder();
        SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
        Document doc = null;
        try {
            doc = saxBuilder.build(new ByteArrayInputStream(content.getBytes("UTF-8")));
@@ -26,7 +26,7 @@ import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.dspace.app.client.DSpaceHttpClientFactory;
import org.dspace.identifier.DOI;
import org.dspace.identifier.IdentifierException;
import org.slf4j.Logger;
@@ -87,7 +87,7 @@ public class EZIDRequest {
            this.authority = authority;
        }

        client = HttpClientBuilder.create().build();
        client = DSpaceHttpClientFactory.getInstance().build();
        httpContext = HttpClientContext.create();
        if (null != username) {
            URI uri = new URI(scheme, host, path, null);
@@ -124,7 +124,7 @@ public class EZIDRequest {
            this.authority = authority;
        }

        client = HttpClientBuilder.create().build();
        client = DSpaceHttpClientFactory.getInstance().build();
        httpContext = HttpClientContext.create();
        if (null != username) {
            CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
@@ -12,12 +12,14 @@ import static org.dspace.iiif.canvasdimension.Util.checkDimensions;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.logging.log4j.Logger;
import org.dspace.app.client.DSpaceHttpClientFactory;
import org.dspace.content.Bitstream;
import org.dspace.iiif.util.IIIFSharedUtils;

@@ -35,14 +37,10 @@ public class IIIFApiQueryServiceImpl implements IIIFApiQueryService {
    public int[] getImageDimensions(Bitstream bitstream) {
        int[] arr = new int[2];
        String path = IIIFSharedUtils.getInfoJsonPath(bitstream);
        URL url;
        BufferedReader in = null;
        try {
            url = new URL(path);
            HttpURLConnection con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod("GET");
            in = new BufferedReader(
                new InputStreamReader(con.getInputStream()));
        try (CloseableHttpClient httpClient = DSpaceHttpClientFactory.getInstance().build()) {
            CloseableHttpResponse httpResponse = httpClient.execute(new HttpGet(path));
            in = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent()));
            String inputLine;
            StringBuilder response = new StringBuilder();
            while ((inputLine = in.readLine()) != null) {
@@ -17,7 +17,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
@@ -99,7 +98,7 @@ public class ADSImportMetadataSourceServiceImpl extends AbstractImportMetadataSo

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for CrossRef");
        throw new UnsupportedOperationException("This method is not implemented for CrossRef");
    }

    @Override
@@ -14,7 +14,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
@@ -23,6 +22,7 @@ import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.apache.commons.lang3.StringUtils;
import org.dspace.app.util.XMLUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
@@ -162,7 +162,7 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        // FIXME: we need this method?
        throw new MethodNotFoundException("This method is not implemented for ArXiv");
        throw new UnsupportedOperationException("This method is not implemented for ArXiv");
    }

    /**
@@ -219,7 +219,7 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
        if (response.getStatus() == 200) {
            String responseString = response.readEntity(String.class);

            SAXBuilder saxBuilder = new SAXBuilder();
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            Document document = saxBuilder.build(new StringReader(responseString));
            Element root = document.getRootElement();

@@ -400,7 +400,7 @@ public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadata
    private List<Element> splitToRecords(String recordsSrc) {

        try {
            SAXBuilder saxBuilder = new SAXBuilder();
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            Document document = saxBuilder.build(new StringReader(recordsSrc));
            Element root = document.getRootElement();
@@ -19,7 +19,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
@@ -27,6 +26,7 @@ import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.XMLUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
@@ -113,7 +113,7 @@ public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadata

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for Cinii");
        throw new UnsupportedOperationException("This method is not implemented for Cinii");
    }

    public String getUrl() {
@@ -302,9 +302,7 @@ public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadata

    private List<Element> splitToRecords(String recordsSrc) {
        try {
            SAXBuilder saxBuilder = new SAXBuilder();
            // disallow DTD parsing to ensure no XXE attacks can occur
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true);
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            Document document = saxBuilder.build(new StringReader(recordsSrc));
            Element root = document.getRootElement();
            return root.getChildren();
@@ -357,9 +355,7 @@ public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadata
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
            int url_len = this.url.length() - 1;
            SAXBuilder saxBuilder = new SAXBuilder();
            // disallow DTD parsing to ensure no XXE attacks can occur
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true);
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();
            List<Namespace> namespaces = Arrays.asList(
@@ -421,9 +417,7 @@ public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadata
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);

            SAXBuilder saxBuilder = new SAXBuilder();
            // disallow DTD parsing to ensure no XXE attacks can occur
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true);
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();
            List<Namespace> namespaces = Arrays
@@ -450,4 +444,4 @@ CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadata
        return metadatumDTO;
    }

}
}
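The hunks above (and the DataCiteConnector, ArXiv, EPO and PubMed changes elsewhere in this diff) replace each locally configured SAXBuilder with the shared org.dspace.app.util.XMLUtils.getSAXBuilder() helper introduced by this change set. As orientation only, the sketch below shows the kind of XXE hardening such a helper presumably centralizes; it is an assumption pieced together from the inline protections removed here and from the comments in the EPO and PubMed hunks further down, not the actual XMLUtils implementation.

import org.jdom2.input.SAXBuilder;

public final class HardenedSaxBuilderSketch {

    private HardenedSaxBuilderSketch() {
    }

    // Hypothetical stand-in for XMLUtils.getSAXBuilder(); the feature names below are the
    // ones the removed inline code used, so they are known Xerces/JDOM2 settings.
    public static SAXBuilder getSAXBuilder() {
        SAXBuilder saxBuilder = new SAXBuilder();
        // Reject DOCTYPE declarations outright; callers that genuinely need them
        // (EPO, PubMed) re-enable this single feature while keeping the rest.
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        // Never resolve external entities and never expand entity references.
        saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
        saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        saxBuilder.setExpandEntities(false);
        return saxBuilder;
    }
}

Centralizing these settings means a parser can no longer be created with the defaults by accident, which is the apparent motivation behind the repeated "new SAXBuilder()" removals in this diff.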
@@ -12,7 +12,6 @@ import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import com.fasterxml.jackson.core.JsonProcessingException;
@@ -21,6 +20,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.XMLUtils;
import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
@@ -64,10 +64,9 @@ public class CrossRefAbstractProcessor implements JsonPathMetadataProcessor {
        }

        String xmlString = "<root>" + abstractValue + "</root>";
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        Document xmlDoc;
        try {
            DocumentBuilder builder = factory.newDocumentBuilder();
            DocumentBuilder builder = XMLUtils.getDocumentBuilder();
            InputSource is = new InputSource(new StringReader(xmlString));
            xmlDoc = builder.parse(is);
        } catch (SAXException | IOException | ParserConfigurationException e) {
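The same idea is applied to DOM parsing here: XMLUtils.getDocumentBuilder() replaces a locally created DocumentBuilderFactory. The sketch below illustrates standard JAXP hardening that such a helper could apply; it is an assumption for orientation, not the actual XMLUtils code.

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

public final class HardenedDocumentBuilderSketch {

    private HardenedDocumentBuilderSketch() {
    }

    // Hypothetical stand-in for XMLUtils.getDocumentBuilder(): disable DOCTYPEs,
    // external entities and entity expansion on a DOM parser.
    public static DocumentBuilder getDocumentBuilder() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        factory.setXIncludeAware(false);
        factory.setExpandEntityReferences(false);
        return factory.newDocumentBuilder();
    }
}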
@@ -16,7 +16,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
@@ -112,7 +111,7 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for CrossRef");
        throw new UnsupportedOperationException("This method is not implemented for CrossRef");
    }

    public String getID(String id) {
@@ -13,7 +13,6 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.el.MethodNotFoundException;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
@@ -188,7 +187,7 @@ public class DataCiteImportMetadataSourceServiceImpl

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for DataCite");
        throw new UnsupportedOperationException("This method is not implemented for DataCite");
    }

    public String getID(String query) {
@@ -32,6 +32,7 @@ import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.xerces.impl.dv.util.Base64;
import org.dspace.app.util.XMLUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
@@ -397,9 +398,11 @@ public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSo

            String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);

            SAXBuilder saxBuilder = new SAXBuilder();
            // disallow DTD parsing to ensure no XXE attacks can occur
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true);
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            // To properly parse EPO responses, we must allow DOCTYPEs overall. But, we can still apply all the
            // other default XXE protections, including disabling external entities and entity expansion.
            // NOTE: we only need to allow DOCTYPEs for this initial API call. All other calls have them disabled.
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", false);
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();

@@ -436,9 +439,7 @@ public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSo

            String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);

            SAXBuilder saxBuilder = new SAXBuilder();
            // disallow DTD parsing to ensure no XXE attacks can occur
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true);
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();

@@ -489,9 +490,7 @@ public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSo

    private List<Element> splitToRecords(String recordsSrc) {
        try {
            SAXBuilder saxBuilder = new SAXBuilder();
            // disallow DTD parsing to ensure no XXE attacks can occur
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true);
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            Document document = saxBuilder.build(new StringReader(recordsSrc));
            Element root = document.getRootElement();
            List<Namespace> namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
@@ -17,19 +17,16 @@ import java.util.Optional;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.config.RequestConfig.Builder;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.client.DSpaceHttpClientFactory;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;

@@ -53,16 +50,15 @@ public class LiveImportClientImpl implements LiveImportClient {
    @Override
    public String executeHttpGetRequest(int timeout, String URL, Map<String, Map<String, String>> params) {
        HttpGet method = null;
        RequestConfig config = RequestConfig.custom()
            .setConnectionRequestTimeout(timeout)
            .setConnectTimeout(timeout)
            .setSocketTimeout(timeout)
            .build();
        try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient)
            .orElseGet(HttpClients::createDefault)) {

            Builder requestConfigBuilder = RequestConfig.custom();
            requestConfigBuilder.setConnectionRequestTimeout(timeout);
            RequestConfig defaultRequestConfig = requestConfigBuilder.build();

            .orElse(DSpaceHttpClientFactory.getInstance().buildWithRequestConfig(config))) {
            String uri = buildUrl(URL, params.get(URI_PARAMETERS));
            method = new HttpGet(uri);
            method.setConfig(defaultRequestConfig);

            Map<String, String> headerParams = params.get(HEADER_PARAMETERS);
            if (MapUtils.isNotEmpty(headerParams)) {
@@ -71,7 +67,6 @@ public class LiveImportClientImpl implements LiveImportClient {
                }
            }

            configureProxy(method, defaultRequestConfig);
            if (log.isDebugEnabled()) {
                log.debug("Performing GET request to \"" + uri + "\"...");
            }
@@ -95,21 +90,17 @@ public class LiveImportClientImpl implements LiveImportClient {
    @Override
    public String executeHttpPostRequest(String URL, Map<String, Map<String, String>> params, String entry) {
        HttpPost method = null;
        RequestConfig config = RequestConfig.custom().build();
        try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient)
            .orElseGet(HttpClients::createDefault)) {

            Builder requestConfigBuilder = RequestConfig.custom();
            RequestConfig defaultRequestConfig = requestConfigBuilder.build();
            .orElse(DSpaceHttpClientFactory.getInstance().buildWithRequestConfig(config))) {

            String uri = buildUrl(URL, params.get(URI_PARAMETERS));
            method = new HttpPost(uri);
            method.setConfig(defaultRequestConfig);
            if (StringUtils.isNotBlank(entry)) {
                method.setEntity(new StringEntity(entry));
            }
            setHeaderParams(method, params);

            configureProxy(method, defaultRequestConfig);
            if (log.isDebugEnabled()) {
                log.debug("Performing POST request to \"" + uri + "\"..." );
            }
@@ -129,17 +120,6 @@ public class LiveImportClientImpl implements LiveImportClient {
        return StringUtils.EMPTY;
    }

    private void configureProxy(HttpRequestBase method, RequestConfig defaultRequestConfig) {
        String proxyHost = configurationService.getProperty("http.proxy.host");
        String proxyPort = configurationService.getProperty("http.proxy.port");
        if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) {
            RequestConfig requestConfig = RequestConfig.copy(defaultRequestConfig)
                .setProxy(new HttpHost(proxyHost, Integer.parseInt(proxyPort), "http"))
                .build();
            method.setConfig(requestConfig);
        }
    }
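The removed configureProxy(...) method above read http.proxy.host and http.proxy.port for every request; after this change the client returned by DSpaceHttpClientFactory is expected to carry that configuration itself. The sketch below is only an assumption of what such a factory method could look like, reusing the exact property names the removed code used; the real implementation lives in org.dspace.app.client.DSpaceHttpClientFactory.

import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.dspace.services.ConfigurationService;

public class ProxyAwareHttpClientFactorySketch {

    private final ConfigurationService configurationService;

    public ProxyAwareHttpClientFactorySketch(ConfigurationService configurationService) {
        this.configurationService = configurationService;
    }

    // Hypothetical equivalent of DSpaceHttpClientFactory.getInstance().buildWithRequestConfig(config):
    // the caller's timeouts become the default request config, and the proxy settings formerly
    // applied by configureProxy() are applied once, when the client is built.
    public CloseableHttpClient buildWithRequestConfig(RequestConfig requestConfig) {
        HttpClientBuilder builder = HttpClientBuilder.create().setDefaultRequestConfig(requestConfig);
        String proxyHost = configurationService.getProperty("http.proxy.host");
        String proxyPort = configurationService.getProperty("http.proxy.port");
        if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) {
            builder.setProxy(new HttpHost(proxyHost, Integer.parseInt(proxyPort), "http"));
        }
        return builder.build();
    }
}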
    /**
     * Allows to set the header parameters to the HTTP Post method
     *
@@ -105,4 +105,13 @@ public class MetadatumDTO {
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Return string representation of MetadatumDTO
     * @return string representation of format "[schema].[element].[qualifier]=[value]"
     */
    @Override
    public String toString() {
        return schema + "." + element + "." + qualifier + "=" + value;
    }
}
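A quick illustration of the new toString() output; the setters other than setValue(...) are assumed to follow the same accessor pattern as the rest of this DTO.

import org.dspace.importer.external.metadatamapping.MetadatumDTO;

public class MetadatumDtoToStringExample {
    public static void main(String[] args) {
        MetadatumDTO dto = new MetadatumDTO();
        dto.setSchema("dc");            // assumed setter, mirroring setValue(...)
        dto.setElement("description");  // assumed setter
        dto.setQualifier("abstract");   // assumed setter
        dto.setValue("A short abstract");
        // Prints: dc.description.abstract=A short abstract
        System.out.println(dto);
    }
}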
@@ -0,0 +1,85 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.importer.external.metadatamapping.transform;

import static java.util.Optional.ofNullable;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Optional;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeType;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor;
import org.dspace.util.SimpleMapConverter;

/**
 * This class is a Metadata processor from a structured JSON Metadata result
 * and uses a SimpleMapConverter, with a mapping properties file
 * to map to a single string value based on mapped keys.<br/>
 * Like:<br/>
 * <code>journal-article = Article<code/>
 *
 * @author paulo-graca
 */
public class StringJsonValueMappingMetadataProcessorService implements JsonPathMetadataProcessor {

    private final static Logger log = LogManager.getLogger();
    /**
     * The value map converter.
     * a list of values to map from
     */
    private SimpleMapConverter valueMapConverter;
    private String path;

    @Override
    public Collection<String> processMetadata(String json) {
        JsonNode rootNode = convertStringJsonToJsonNode(json);
        Optional<JsonNode> abstractNode = Optional.of(rootNode.at(path));
        Collection<String> values = new ArrayList<>();

        if (abstractNode.isPresent() && abstractNode.get().getNodeType().equals(JsonNodeType.STRING)) {

            String stringValue = abstractNode.get().asText();
            values.add(ofNullable(stringValue)
                .map(value -> valueMapConverter != null ? valueMapConverter.getValue(value) : value)
                .orElse(valueMapConverter.getValue(null)));
        }
        return values;
    }

    private JsonNode convertStringJsonToJsonNode(String json) {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode body = null;
        try {
            body = mapper.readTree(json);
        } catch (JsonProcessingException e) {
            log.error("Unable to process json response.", e);
        }
        return body;
    }

    /* Getters and Setters */

    public String convertType(String type) {
        return valueMapConverter != null ? valueMapConverter.getValue(type) : type;
    }

    public void setValueMapConverter(SimpleMapConverter valueMapConverter) {
        this.valueMapConverter = valueMapConverter;
    }

    public void setPath(String path) {
        this.path = path;
    }

}
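A hypothetical wiring example for the new processor; in DSpace the path and the SimpleMapConverter are normally injected through Spring, with the converter backed by a mapping properties file such as the "journal-article = Article" example from the javadoc. The JSON pointer used below is illustrative only.

import java.util.Collection;

import org.dspace.importer.external.metadatamapping.transform.StringJsonValueMappingMetadataProcessorService;
import org.dspace.util.SimpleMapConverter;

public class TypeMappingUsageSketch {

    // For json = {"type":"journal-article"} and a converter mapping "journal-article = Article",
    // the returned collection holds the single value "Article".
    public static Collection<String> mapType(SimpleMapConverter typeConverter, String json) {
        StringJsonValueMappingMetadataProcessorService processor =
                new StringJsonValueMappingMetadataProcessorService();
        processor.setPath("/type");                    // illustrative JSON pointer
        processor.setValueMapConverter(typeConverter); // required: processMetadata() calls getValue(...)
        return processor.processMetadata(json);
    }
}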
@@ -0,0 +1,67 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.importer.external.pubmed.metadatamapping.contributor;

import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;

/**
 * This class is responsible for extracting the abstract from a PubMed XML document.
 * It uses XPath to find the relevant elements and constructs a formatted string for the abstract, respecting
 * PubMed's labelled abstract format, and including the labels in the output.
 */
public class PubmedAbstractMetadatumContributor extends SimpleXpathMetadatumContributor {

    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();

        List<Namespace> namespaces = new ArrayList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }

        XPathExpression<Element> xpath = XPathFactory.instance().compile(query, Filters.element(), null, namespaces);
        List<Element> nodes = xpath.evaluate(t);
        StringBuilder sb = new StringBuilder();

        for (Element el : nodes) {
            String label = el.getAttributeValue("Label");
            String text = el.getTextNormalize();

            if (text == null || text.isEmpty()) {
                continue;
            }

            if (sb.length() > 0) {
                sb.append("\n\n");
            }

            if (label != null && !label.equalsIgnoreCase("UNLABELLED")) {
                sb.append(label).append(": ");
            }
            sb.append(text);
        }

        String fullAbstract = sb.toString().trim();
        if (!fullAbstract.isEmpty()) {
            values.add(metadataFieldMapping.toDCValue(field, fullAbstract));
        }
        return values;
    }
}
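To make the labelled-abstract handling concrete, the sketch below builds the kind of <AbstractText> elements PubMed returns and notes, in comments, the single string the loop above would assemble from them. It is purely illustrative; in production the elements come from the configured XPath query and field mapping.

import org.jdom2.Element;

public class LabelledAbstractExample {
    public static void main(String[] args) {
        Element abstractElement = new Element("Abstract");
        abstractElement.addContent(new Element("AbstractText")
                .setAttribute("Label", "BACKGROUND").setText("Context of the study."));
        abstractElement.addContent(new Element("AbstractText")
                .setAttribute("Label", "RESULTS").setText("Main findings."));
        abstractElement.addContent(new Element("AbstractText")
                .setAttribute("Label", "UNLABELLED").setText("Closing remarks."));
        // Fed to the contributor, these nodes would be joined into one abstract value:
        //
        //   BACKGROUND: Context of the study.
        //
        //   RESULTS: Main findings.
        //
        //   Closing remarks.
    }
}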
@@ -24,6 +24,7 @@ import java.util.concurrent.Callable;
import com.google.common.io.CharStreams;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.dspace.app.util.XMLUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
@@ -54,6 +55,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat

    private String urlFetch;
    private String urlSearch;
    private String apiKey;

    private int attempt = 3;

@@ -209,6 +211,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
        @Override
        public Integer call() throws Exception {
            URIBuilder uriBuilder = new URIBuilder(urlSearch);
            if (StringUtils.isNotBlank(apiKey)) {
                uriBuilder.addParameter("api_key", apiKey);
            }
            uriBuilder.addParameter("db", "pubmed");
            uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class));
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
@@ -233,7 +238,10 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
        String value = null;

        try {
            SAXBuilder saxBuilder = new SAXBuilder();
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            // To properly parse PubMed responses, we must allow DOCTYPEs overall. But, we can still apply all the
            // other default XXE protections, including disabling external entities and entity expansion.
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", false);
            Document document = saxBuilder.build(new StringReader(src));
            Element root = document.getRootElement();

@@ -282,6 +290,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
            List<ImportRecord> records = new LinkedList<ImportRecord>();

            URIBuilder uriBuilder = new URIBuilder(urlSearch);
            if (StringUtils.isNotBlank(apiKey)) {
                uriBuilder.addParameter("api_key", apiKey);
            }
            uriBuilder.addParameter("db", "pubmed");
            uriBuilder.addParameter("retstart", start.toString());
            uriBuilder.addParameter("retmax", count.toString());
@@ -312,6 +323,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
            String webEnv = getSingleElementValue(response, "WebEnv");

            URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
            if (StringUtils.isNotBlank(apiKey)) {
                uriBuilder2.addParameter("api_key", apiKey);
            }
            uriBuilder2.addParameter("db", "pubmed");
            uriBuilder2.addParameter("retstart", start.toString());
            uriBuilder2.addParameter("retmax", count.toString());
@@ -350,12 +364,10 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat

    private List<Element> splitToRecords(String recordsSrc) {
        try {
            SAXBuilder saxBuilder = new SAXBuilder();
            // Disallow external entities & entity expansion to protect against XXE attacks
            // (NOTE: We receive errors if we disable all DTDs for PubMed, so this is the best we can do)
            saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
            saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
            saxBuilder.setExpandEntities(false);
            SAXBuilder saxBuilder = XMLUtils.getSAXBuilder();
            // To properly parse PubMed responses, we must allow DOCTYPEs overall. But, we can still apply all the
            // other default XXE protections, including disabling external entities and entity expansion.
            saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", false);
            Document document = saxBuilder.build(new StringReader(recordsSrc));
            Element root = document.getRootElement();

@@ -386,6 +398,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
        public ImportRecord call() throws Exception {

            URIBuilder uriBuilder = new URIBuilder(urlFetch);
            if (StringUtils.isNotBlank(apiKey)) {
                uriBuilder.addParameter("api_key", apiKey);
            }
            uriBuilder.addParameter("db", "pubmed");
            uriBuilder.addParameter("retmode", "xml");
            uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class));
@@ -426,6 +441,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
        public Collection<ImportRecord> call() throws Exception {

            URIBuilder uriBuilder = new URIBuilder(urlSearch);
            if (StringUtils.isNotBlank(apiKey)) {
                uriBuilder.addParameter("api_key", apiKey);
            }
            uriBuilder.addParameter("db", "pubmed");
            uriBuilder.addParameter("usehistory", "y");
            uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class));
@@ -455,6 +473,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
            String queryKey = getSingleElementValue(response, "QueryKey");

            URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
            if (StringUtils.isNotBlank(apiKey)) {
                uriBuilder.addParameter("api_key", apiKey);
            }
            uriBuilder2.addParameter("db", "pubmed");
            uriBuilder2.addParameter("retmode", "xml");
            uriBuilder2.addParameter("WebEnv", webEnv);
@@ -530,4 +551,8 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
        this.urlSearch = urlSearch;
    }

    public void setApiKey(String apiKey) {
        this.apiKey = apiKey;
    }

}
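For reference, the conditional api_key handling added above produces E-utilities requests of the following shape. The base URL is a placeholder for the configured urlSearch value and the key is fictitious; both are assumptions for illustration only.

import org.apache.http.client.utils.URIBuilder;

public class PubmedApiKeyUrlSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder for the configured urlSearch property.
        URIBuilder uriBuilder = new URIBuilder("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi");
        uriBuilder.addParameter("api_key", "EXAMPLE-KEY"); // only appended when an apiKey is configured
        uriBuilder.addParameter("db", "pubmed");
        uriBuilder.addParameter("term", "open access repositories");
        // e.g. .../esearch.fcgi?api_key=EXAMPLE-KEY&db=pubmed&term=open+access+repositories
        System.out.println(uriBuilder.build());
    }
}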
Some files were not shown because too many files have changed in this diff.