Compare commits

...

931 Commits

Author SHA1 Message Date
Tim Donohue
44e7b3aca6 [maven-release-plugin] prepare release dspace-7.0-preview-1 2019-05-17 15:34:04 -05:00
Tim Donohue
2b9d726214 Merge pull request #2430 from atmire/DS-4253-discovery-oom-fix
DS-4253 Avoid oom during discovery reindex
2019-05-17 10:05:32 -05:00
Chris Wilper
7238f42cf4 DS-4253 Avoid oom during discovery reindex 2019-05-16 16:35:30 -04:00
Mark H. Wood
d89186a192 Merge pull request #2392 from mwoodiupui/DS-3658
[DS-3658] Configure ReindexerThread disable reindex
2019-05-15 13:04:03 -04:00
Mark H. Wood
d48aae5baa [DS-3658] Remove JSPUI comment mistakenly reintroduced by comment fix. 2019-05-15 11:41:22 -04:00
Mark H. Wood
8afb630fcc Merge branch 'master' into DS-3658 2019-05-15 11:15:17 -04:00
Mark H. Wood
43444c84b7 [DS-3658] Accept abollini's fix to configuration comments, with minor adjustments. 2019-05-15 11:10:09 -04:00
Tim Donohue
8f472d9197 Merge pull request #2376 from DSpace/configurable_entities
Adding Configurable "Entities" to DSpace 7 (Part 1: Architecture, Configuration, Search, Display)
2019-05-09 15:56:41 -05:00
Tim Donohue
ba267ce72b Merge pull request #2422 from atmire/entities-feedback-2019-05-07
Entities feedback 2019 05 07
2019-05-09 12:54:03 -05:00
Tim Donohue
be4c2f9ad5 Merge branch 'configurable_entities' into entities-feedback-2019-05-07 2019-05-09 12:09:42 -05:00
Tim Donohue
8097589db5 Merge pull request #2418 from atmire/entities-feedback-2019-04
Configurable entities feedback
2019-05-09 12:01:37 -05:00
Ben Bosman
858bf72fa9 Merge remote-tracking branch 'origin/w2p-62187_IT-changes' into entities-feedback-2019-05-07
# Conflicts:
#	dspace-spring-rest/src/test/java/org/dspace/app/rest/RelationshipRestRepositoryIT.java
2019-05-08 16:47:16 +02:00
Raf Ponsaerts
b76aacc626 Added the expect status code in test 2019-05-08 16:23:01 +02:00
Raf Ponsaerts
5c035fe6a6 Refactored the RelationshipRestRepositoryIT to now use a builder instead of the EPersonService when creating a user 2019-05-08 13:12:14 +02:00
Ben Bosman
be9d80c410 Temporarily disabled for https://github.com/DSpace/Rest7Contract/pull/57#discussion_r272605397 2019-05-07 19:54:20 +02:00
Ben Bosman
3f42590fb2 Temporarily disabled for https://github.com/DSpace/Rest7Contract/pull/57#discussion_r272605397 2019-05-07 18:52:58 +02:00
Ben Bosman
5895f08337 Temporarily disabled for https://github.com/DSpace/Rest7Contract/pull/57#discussion_r272605397 2019-05-07 18:27:51 +02:00
Ben Bosman
b5e29a36b8 reordered tests to first verify the normal import, and hereafter verify updating (for logic while reading the IT) 2019-05-07 17:48:38 +02:00
Ben Bosman
bee7313f8a Merge remote-tracking branch 'origin/w2p-62146_initialize-entities-it-changes' into entities-feedback-2019-05-07 2019-05-07 17:47:04 +02:00
Raf Ponsaerts
b98943ff64 Fixed checkstyle 2019-05-07 10:32:35 +02:00
Raf Ponsaerts
71cd11b558 Implemented the InitializeEntitiesIT changes 2019-05-07 09:59:32 +02:00
Raf Ponsaerts
aac6b12de9 Additional pagination tests 2019-05-07 09:19:48 +02:00
Raf Ponsaerts
a3334bd10e Added inline comments to the RelationshipRestRepositoryIT 2019-05-06 15:48:38 +02:00
benbosman
4ccf73ebb6 Merge pull request #2414 from atmire/entities-feedback-2019-04-29
feedback on configurable entities
2019-05-02 17:10:46 +02:00
benbosman
cd8226cc97 Merge pull request #2419 from atmire/w2p-62057_configurable-entities-feedback
configurable entities feedback
2019-05-02 17:10:03 +02:00
Raf Ponsaerts
6e21bcdad1 Implemented feedback 2019-05-02 15:08:08 +02:00
Raf Ponsaerts
4c6e939e57 Implemented feedback 2019-05-02 13:26:30 +02:00
Raf Ponsaerts
7dfd34036c Implemented feedback 2019-04-30 15:51:28 +02:00
Andrea Bollini
fc05d2dab0 Merge pull request #2411 from atmire/refactoring-instanceof
Small improvements to configurable entities
2019-04-30 09:59:40 +02:00
Ben Bosman
62ca358f33 support for displaying an org unit as author of a publication 2019-04-29 18:15:14 +02:00
Ben Bosman
9a40fd817c store place in relation virtual metadata 2019-04-29 17:59:38 +02:00
Ben Bosman
ccbf681f96 feedback on configurable entities 2019-04-29 15:42:57 +02:00
Ben Bosman
20705e1da1 feedback on configurable entities 2019-04-29 11:54:29 +02:00
Ben Bosman
6707dfb2e6 feedback on configurable entities 2019-04-29 11:50:45 +02:00
benbosman
0bb9880d2c Merge pull request #2413 from atmire/override
override
2019-04-29 11:44:19 +02:00
Ben Bosman
70e34edfef override 2019-04-29 11:13:55 +02:00
Tim Donohue
51aa22cdb9 Merge pull request #2409 from atmire/rename-VirtualBean-VirtualMetadataPopularConfiguration
Rename virtual bean virtual metadata configuration
2019-04-26 10:44:16 -05:00
Ben Bosman
8f56970c33 order of imports 2019-04-26 09:34:37 +02:00
Ben Bosman
09c9d851a5 JavaDoc 2019-04-26 09:32:29 +02:00
Ben Bosman
b331b26f80 Avoid creating a new context 2019-04-25 17:01:23 +02:00
Ben Bosman
b2d28fcf83 Avoid creating a new context 2019-04-25 16:44:16 +02:00
Ben Bosman
7b9f08e7ad JavaDoc 2019-04-25 15:52:57 +02:00
Ben Bosman
658708832a Missing @Override 2019-04-25 15:48:30 +02:00
Ben Bosman
dcf9240f42 Refactoring instanceof 2019-04-25 15:45:24 +02:00
benbosman
d1cd9c4a9b Merge pull request #2408 from atmire/Missing-override
Missing @Override
2019-04-25 15:35:23 +02:00
Ben Bosman
ec8109a67e Missing @Override 2019-04-25 15:34:44 +02:00
Ben Bosman
5a69301212 Rename VirtualBean to VirtualMetadataPopularConfiguration 2019-04-25 15:15:39 +02:00
Ben Bosman
72afed29b8 Rename VirtualBean to VirtualMetadataPopularConfiguration 2019-04-25 15:01:11 +02:00
Ben Bosman
07ce7da613 Merge remote-tracking branch 'community/configurable_entities' into configurable_entities 2019-04-25 14:37:23 +02:00
Ben Bosman
0898e33319 An additional use case in the comments 2019-04-25 14:36:53 +02:00
benbosman
bba8feff5b Merge pull request #2407 from atmire/w2p-62018_update-configurable_entities-with-master
update configurable entities with master
2019-04-25 14:06:34 +02:00
Raf Ponsaerts
d97ea343e4 Merged dspace/master into dspace/configurable_entities 2019-04-25 13:30:25 +02:00
Tim Donohue
afc6682bfb Merge pull request #2391 from 4Science/DS-4166_mydspace
DS-4166 Index workspace, workflow and tasks in SOLR
2019-04-24 10:03:35 -05:00
Tim Donohue
1bf1131237 Merge pull request #2404 from atmire/w2p-61912_initialize-entities-update
DS-4218 - Support for second "initialize-entities"
2019-04-24 10:02:27 -05:00
Ben Bosman
c83ae49e7f Feedback on PR-2404
Missing context complete, this was lost in the previous commit from this PR
2019-04-24 09:50:11 +02:00
Tim Donohue
b30e1cc6f9 Merge pull request #2216 from tomdesair/DS-4006_EPerson-Group-Relation-and-Embeds
DS-4006: Embed membership groups on EPerson REST object
2019-04-23 16:23:53 -05:00
benbosman
fe0a67ca43 Merge pull request #15 from AlexanderS/w2p-61912_initialize-entities-update
Small indentation fix
2019-04-18 12:54:41 +02:00
Alexander Sulfrian
f8b0eac67a Fix indentation 2019-04-18 12:15:17 +02:00
Raf Ponsaerts
22b297b207 Added IT for the updated InitializeEntities script 2019-04-18 08:47:33 +02:00
Raf Ponsaerts
cfd4e0b435 Updated the InitializeEntities implementation and added a unique constraint to RelationshipType table based on types and labels 2019-04-17 13:41:24 +02:00
Andrea Bollini
5338a88892 DS-4166 community feedback: postpone support for highlighting over authority controlled metadata 2019-04-16 22:37:02 +02:00
Tim Donohue
b0bff18b53 Merge pull request #2257 from mspalti/eperson_update
[DS-4062] Endpoint to allow logged in EPerson to change password or other profile information.
2019-04-15 16:08:21 -05:00
Michael W Spalti
4fe26b914e [DS-4062] Updated task and workflow plugins to implement the new permission method. 2019-04-12 17:15:20 -07:00
Michael W Spalti
ca02d36933 [DS-4062] Added support for eperson updates by the currently logged in user.
Removed misplaced factory call.

Removed misplaced factory call.

Added exception and updated signature for the eperson repository patch method.

Removed patch endpoint from the authentication controller and modified eperson repository to use Spring PreAuthorize annotation and the eperson permission evaluator plugin.

[DS-4062] Removed unused autowired bean from DSpaceRestRepository.

No longer needed after the previous refactor to remove endpoint.

[DS-4062] Removed unused import.

[DS-4062] The EPersonRestRepository updated to hasPermission annotation only.

Also limiting (experimentally) some eperson patch operations to administrators.

Added administrator restriction for netid patch operation.

Removed support for patching eperson profile metadata.

Removed support for patching eperson profile metadata.

Added eperson email patch operation.

Updated permission plugins to support patch requests per suggestion by @tomdesair.

Updated eperson authentication plugin and added unit test.

Added the new PreAuthorize annotations for patch.

Added the missing header reported by checkstyle.

Changed order of static imports in unit test to pass checkstyle.

Added integration test to verify that a non-admin user cannot update another eperson password.
2019-04-12 16:25:37 -07:00
benbosman
489ed8350b Merge pull request #2401 from atmire/entities-feedback
Entities feedback
2019-04-09 17:25:32 +02:00
Ben Bosman
62c0f0714d Feedback on PR-2376 2019-04-09 16:01:56 +02:00
Ben Bosman
a23a8821e6 Feedback on PR-2376 2019-04-09 15:16:17 +02:00
benbosman
2ed91c616c Merge pull request #2400 from atmire/w2p-61605_move-virtual-metadata-to-separate-spring-file
Moved the virtual metadata populator xml logic to a separate file
2019-04-09 15:04:13 +02:00
Raf Ponsaerts
750bdecbc0 Moved the virtual metadata populator xml logic to a separate file 2019-04-09 14:16:37 +02:00
Andrea Bollini
84eabcbf91 DS-4166 community feedback: renamed FindableObjectService in IndexableObjectService 2019-04-09 11:50:02 +02:00
benbosman
b0c6b912b3 Merge pull request #2399 from atmire/w2p-61536_update-configurable-entities-with-latest-master
update configurable entities with latest master
2019-04-08 17:12:27 +02:00
Raf Ponsaerts
92cb9730fb Changed the URI parsing to better fit a generic approach on Repository level 2019-04-08 16:16:13 +02:00
Raf Ponsaerts
4f3885738a Fixed tests and refactored the way priorities work in the builders 2019-04-08 14:56:23 +02:00
Raf Ponsaerts
4fc6ebc28c Intermediary Commit 2019-04-05 15:46:18 +02:00
Raf Ponsaerts
211e8f72a1 Merged dspace/master into configurable_entities 2019-04-05 13:45:01 +02:00
Andrea Bollini
f8cedefa29 DS-4166 community feedback: improved javadocs and comments 2019-04-05 13:30:36 +02:00
Andrea Bollini
20a0e42098 DS-4166 fix configuration of the submitter facet 2019-04-05 08:46:15 +02:00
Andrea Bollini
d23ebd66c0 DS-4166 fix configuration of the namedresourcetype facet 2019-04-04 15:53:18 +02:00
Andrea Bollini
504cb3c5cf DS-4166 community feedback: merge -i and -item_uuid options 2019-04-04 13:09:02 +02:00
Andrea Bollini
6fd5a76837 DS-4166 community feedback: code cleanup 2019-04-04 12:56:55 +02:00
Tim Donohue
e17e8eea08 Merge pull request #2388 from atmire/configurable_entities
DS-4121: Refactor Entities ITs
2019-04-02 10:23:05 -05:00
Tim Donohue
1e6497f1d6 Merge pull request #2379 from AndrewZWood/DS-4194
DS-4194 Use lazy fetching iterator for long query results
2019-03-29 15:08:25 -05:00
Tim Donohue
77928c2560 Add info about how/when auto-reindexing takes place 2019-03-28 12:57:02 -04:00
Andrew Wood
0324787e35 DS-3658 Document new property reflect default behavior in value 2019-03-28 12:57:02 -04:00
Andrew Wood
2dd14a06ba DS-3658 Configure ReindexerThread disable reindex 2019-03-28 12:57:02 -04:00
Andrea Bollini
5e3164bd06 DS-4166 community feedback: use a more appropriate exception 2019-03-28 15:38:39 +01:00
Andrea Bollini
8de7a50f5b DS-4166 community feedback: report about not existing uuid 2019-03-28 15:20:49 +01:00
Andrea Bollini
a8190fe27a DS-3851 add test and fix for invalid task claiming 2019-03-28 14:42:03 +01:00
Andrea Bollini
30899b0e55 DS-4166 move IndexableObject to the discovery package 2019-03-28 13:20:47 +01:00
Andrea Bollini
eba97f4080 Merge branch 'master' of https://github.com/DSpace/DSpace into mydspace_clean 2019-03-28 13:15:21 +01:00
Andrea Bollini
689ac4e3bd DS-4166 community feedback: implement the IndexableObject interface only where really needed 2019-03-28 12:06:22 +01:00
Yana De Pauw
b71b6e9f1b Fix check style issues in RelationShipTypeBuilder 2019-03-28 11:13:35 +01:00
Andrea Bollini
1252075ac9 DS-4166 community feedback: remove multithreads indexing support 2019-03-28 09:02:58 +01:00
Andrea Bollini
cc11f49556 DS-4166 community feedback: remove unused methods and configurations 2019-03-28 09:02:16 +01:00
Andrea Bollini
272f21a8dd DS-4166 community feedback: improve documentation 2019-03-28 00:22:43 +01:00
Andrea Bollini
006b938c4a DS-4166 community feedback: use dedicated fields for workspace/workflow searches 2019-03-28 00:09:13 +01:00
Andrea Bollini
d25463fedb Merge pull request #2312 from 4Science/DS-3851_workflow_new
DS-3851 Endpoint to interact with the workflow
2019-03-28 00:05:08 +01:00
Andrea Bollini
15f6ced31a DS-3851 community feedback: typo 2019-03-27 22:43:30 +01:00
Andrea Bollini
b98d8f4d21 DS-4166 community feedback: rename resultObject to indexableObject in the discover REST result 2019-03-27 20:51:01 +01:00
Andrea Bollini
4b85bf4c56 DS-4166 community feedback: rename BrowsableObject to IndexableObject 2019-03-27 20:31:22 +01:00
Yana De Pauw
13ce03c8c7 Refactor Entity ITs to not use the xml file but builders 2019-03-27 13:19:28 +01:00
benbosman
85480b9ec7 Merge pull request #2383 from atmire/Feedback-PR-2376
Feedback on PR-2376
2019-03-26 13:00:18 +01:00
Andrea Bollini
8c28d5136e DS-3851 community feedback: deal properly with wrong requests 2019-03-26 12:56:48 +01:00
Ben Bosman
b828e56bb8 Squashed feedback on PR-2376 2019-03-25 14:10:28 +01:00
Tim Donohue
7f2e696949 Merge remote-tracking branch 'upstream/master' into configurable_entities 2019-03-22 19:54:03 +00:00
Terry Brady
0cc2226b25 Merge pull request #2373 from the-library-code/DS-4189
DS-4189 Enhance configuration by environment variables
2019-03-22 15:48:10 -04:00
Tim Donohue
f56996634b Merge branch 'master' into configurable_entities 2019-03-22 19:12:19 +00:00
Terry Brady
aa2a5f8b6e Update config-definition.xml 2019-03-22 15:12:14 -04:00
Tim Donohue
a857932393 Merge pull request #2058 from mwoodiupui/DS-3695
[DS-3695] Upgrade Solr to 7.3.1.
2019-03-22 10:47:43 -05:00
Mark H. Wood
62928cedb6 [DS-3695] Remove unused 'solr.server.version' POM property. 2019-03-22 11:20:47 -04:00
Terry Brady
808f04852b Update DSpaceEnvironmentConfiguration.java 2019-03-21 14:34:35 -04:00
Terry Brady
a39495bef5 Add replacement for dash and dot 2019-03-21 14:13:32 -04:00
Andrew Wood
51cd8fa46a DS-4194 Use lazy fetching iterator for long query results 2019-03-20 16:29:22 -04:00
Pascal-Nicolas Becker
1cfc53db5c DS-4189 Enhance configuration by environment variables 2019-03-18 02:22:23 +01:00
Andrea Bollini
5d997e6cde DS-3851 manually solve unnoted merge conflict 2019-03-15 22:21:45 +01:00
Andrea Bollini
1df2d35c1f Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3851_workflow_new 2019-03-15 22:05:53 +01:00
Tim Donohue
b4e7d62725 Merge pull request #2375 from atmire/w2p-61144_place-in-metadata
Configurable Entities - Item metadata place
2019-03-15 11:48:10 -05:00
Tim Donohue
828861a871 Merge pull request #2331 from atmire/w2p-58898_place-column-calculation-error
Mixing entities and plain-text values
2019-03-15 10:58:34 -05:00
Raf Ponsaerts
00a4863178 Removed the jsonIgnore from place attribute in MetadataValueRest and fixed IT 2019-03-15 14:34:29 +01:00
benbosman
02d8e5f750 Merge pull request #13 from atmire/w2p-58898_place-column-calculation-error-prepare-merge
W2p 58898 place column calculation error prepare merge
2019-03-15 13:56:39 +01:00
Raf Ponsaerts
9678dbf573 Merged configurable_entities into w2p-58898_place-column-calculation-error-prepare-merge 2019-03-15 13:27:24 +01:00
Andrea Bollini
eee2929e51 DS-3851 remove unused import 2019-03-15 00:42:16 +01:00
Andrea Bollini
b481fbae17 DS-3851 community feedback: remove unused variable 2019-03-14 23:57:25 +01:00
Andrea Bollini
19606943b1 DS-3851 community feedback: code cleanup 2019-03-14 23:49:35 +01:00
Raf Ponsaerts
13f9c8e1e5 Merged dspace/master into w2p-58898_place-column-calculation-error-prepare-merge 2019-03-14 15:08:42 +01:00
Tim Donohue
7e4e22d472 Merge pull request #2367 from tdonohue/coveralls-refactor
Refactor Coveralls.io plugin to only run after successful build/test in Travis CI
2019-03-13 16:19:48 -05:00
Tim Donohue
b891f3190d Merge pull request #2208 from atmire/DS-4014_CORS-headers-missing-when-generic-exception-is-thrown
[DS-4014] added a catch in the DSpaceApiExceptionControllerAdvice for…
2019-03-13 14:38:14 -05:00
Tim Donohue
c87cd80ac3 Merge pull request #2372 from atmire/configurable_entities-prepare-for-merge
Configurable entities prepare for merge
2019-03-13 10:23:50 -05:00
Raf Ponsaerts
65fb4674a0 Merged master into configurable_entities-prepare-for-merge 2019-03-13 14:34:13 +01:00
Raf Ponsaerts
970b66edb2 Applied feedback; implemented context authorisation logic in tests 2019-03-13 13:10:49 +01:00
Andrea Bollini
3e22142aa7 DS-3851 cleanup the BrowsableObject interface 2019-03-12 23:31:17 +01:00
Andrea Bollini
e883c95de8 DS-3851 cleanup the BrowsableObject interface 2019-03-12 22:17:22 +01:00
Tim Donohue
e48fef5f62 Merge pull request #2332 from atmire/w2p-57159_permission-to-create-relations
Relationship CRUD
2019-03-12 08:17:13 -05:00
Tim Donohue
39e04c2223 Refactor Coveralls plugin to only run after successful build/test. Build can be limited to one step, too. 2019-03-11 19:09:13 +00:00
Tim Donohue
efbffe4ea5 Merge pull request #2313 from atmire/DS-3908_Metadata_patch_support
DS-3908 Metadata patch support
2019-03-08 11:38:08 -06:00
Chris Wilper
337daef738 DS-3908 Restore authz state as early as possible in tests 2019-03-07 12:32:23 -05:00
Chris Wilper
2171b7ff83 DS-3908 Address minor checkstyle issues 2019-03-07 10:35:17 -05:00
Chris Wilper
21fe803b53 DS-3908 Restore auth system state where appropriate 2019-03-07 09:59:19 -05:00
Chris Wilper
96d544d075 DS-3908 Improve javadocs 2019-03-07 09:54:24 -05:00
Andrea Bollini
000c47e7a3 DS-3851 demonstrate that admins cannot manipulate tasks 2019-03-07 15:28:55 +01:00
Chris Wilper
229b9f1039 Merge branch 'master' into DS-3908_Metadata_patch_support 2019-03-07 08:32:57 -05:00
Andrea Bollini
cae9a6baa8 DS-4166 fix converter for authority based filter 2019-03-07 09:17:44 +01:00
Andrea Bollini
499fdcd61c DS-4166 renamed BrowsableDSpaceObject in BrowsableObject 2019-03-07 01:11:14 +01:00
Andrea Bollini
6a5838461a Merge branch 'DS-3851_workflow_new' of https://github.com/4Science/DSpace into DS-4166_mydspace 2019-03-07 00:49:46 +01:00
Andrea Bollini
0a54d4cf1e DS-3851 Add ITs for unauthorized approve/reject and improve error handling 2019-03-06 23:54:40 +01:00
Andrea Bollini
1c0e29bc64 DS-3851 raise events in the service layer 2019-03-06 22:44:25 +01:00
Andrea Bollini
a596a5cabf DS-3851 use MediaTypes constant from spring 2019-03-06 22:42:44 +01:00
Andrea Bollini
23bbf45048 DS-3851 remove unused methods 2019-03-06 22:13:26 +01:00
Andrea Bollini
d751d5dc59 DS-3851 add fixme to remember the need to remove getWorkflow group methods 2019-03-06 22:13:09 +01:00
Andrea Bollini
65d3495e18 DS-3851 raise events in the service layer 2019-03-06 19:36:24 +01:00
Andrea Bollini
15c4a33d77 DS-3851 add fixme notice for future refactoring of the uri-list processing 2019-03-06 19:35:36 +01:00
Andrea Bollini
ef10d47156 DS-3851 Add native support for pagination in the findBySubmitter 2019-03-06 18:00:30 +01:00
Andrea Bollini
434c0fa748 DS-3851 add IT to pass through a whole workflow 2019-03-06 17:04:25 +01:00
Andrea Bollini
4475eca5b2 DS-3851 code cleanup: community feedback 2019-03-06 15:51:00 +01:00
Raf Ponsaerts
6056bf94e0 Added the comment to the context.turnOffAuthorizationSystem 2019-03-06 14:54:01 +01:00
Raf Ponsaerts
e3fc9a52a7 Merge branch 'w2p-57159_permission-to-create-relations' into w2p-58898_place-column-calculation-error 2019-03-06 14:46:40 +01:00
Tom Desair
aa0a2a3b05 Remove unused import 2019-03-06 08:17:49 +01:00
Tom Desair
10fc1afaa3 Fix ignored delete EPerson test 2019-03-05 23:36:44 +01:00
Tom Desair
bfc91292d2 DS-4014: Correct status code for AccessDeniedException with authorized users 2019-03-05 23:35:49 +01:00
Raf Ponsaerts
15faacc70c [DS-4014] changed the exception handling to now look for the ResponseStatus on the Exception Class, if it doesn't exist we simply return a 500. AccessDeniedException does not have a ResponseStatus annotation but it has to be handled correctly, therefore a specific catch clause is required 2019-03-05 23:13:14 +01:00
Raf Ponsaerts
df1e464939 [DS-4014] fixed tests and added more exception catches 2019-03-05 23:13:14 +01:00
Raf Ponsaerts
7c122e55aa [DS-4014] added a catch in the DSpaceApiExceptionControllerAdvice for a generic exception and throws a 500 with cors headers available 2019-03-05 23:13:14 +01:00
Tom Desair
0961c5040d DS-4006: Added explicit check for "groups" link in Group Rest object 2019-03-05 22:36:13 +01:00
Tom Desair
aef31c8d37 DS-4006: Embed membership groups on EPerson REST object 2019-03-05 22:11:46 +01:00
Terry Brady
d7ec9e537a Merge pull request #2348 from Georgetown-University-Libraries/ds4167
[DS-4167] Migrate update-sequences.sql to `database` command
2019-03-05 10:56:44 -08:00
Raf Ponsaerts
033790e1b2 Added missing context.restoreAuthSystemState 2019-03-05 14:17:08 +01:00
Andrea Bollini
7f94383719 DS-4166 Fix SearchMatcher to be more strict 2019-03-03 22:12:49 +01:00
Andrea Bollini
e929c39fb2 DS-4166 add ITs for workspace/workflow discover configuration 2019-03-03 19:51:57 +01:00
Giuseppe
d5b4963e26 Update submission-forms.xml
Replaced twobox field with tag field
2019-02-28 17:31:09 +01:00
Andrea Bollini
b3423770dd DS-4166 restore official name to the configuration parameter 2019-02-28 15:56:45 +01:00
Mark H. Wood
999e912909 Merge pull request #2180 from mwoodiupui/DS-3989
[DS-3989] Always capture curation task output.
2019-02-27 16:12:07 -05:00
Andrea Bollini
9d378143a1 DS-4166 restore code removed by incident rebasing the PR 2019-02-26 14:47:51 +01:00
Chris Wilper
185766d8d2 DS-3908 Add DSO metadata PATCH tests 2019-02-25 19:26:40 -05:00
Chris Wilper
1ad0c88c3c DS-3908 Add PATCH support for DSO metadata 2019-02-25 19:26:40 -05:00
Terry Brady
4b6611353f Merge pull request #2354 from Georgetown-University-Libraries/ds4173
[DS-4173] Docker build - set build dir owner to dspace
2019-02-25 14:35:08 -08:00
Terry Brady
644970eba4 set build dir owner to dspace 2019-02-25 08:38:18 -08:00
Andrea Bollini
ff298b393f DS-4166 fix existing discover tests 2019-02-25 10:03:15 +01:00
Andrea Bollini
c56e41aec2 DS-4166 Index workspace, workflow and tasks in SOLR 2019-02-25 10:03:15 +01:00
Andrea Bollini
1e3e37d11e DS-3851 code cleanup: community feedback 2019-02-24 21:06:52 +01:00
Andrea Bollini
b0f8f828b7 Fix issue with the Builder comparator and cleanup of workflowgroups 2019-02-24 19:36:40 +01:00
Andrea Bollini
b7b3a74f85 Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3851_workflow_new 2019-02-24 16:41:52 +01:00
Andrea Bollini
5b98f05e70 DS-3851 fix priority of Metadata Schema and MetadataField builder 2019-02-24 15:37:14 +01:00
Andrea Bollini
7ad47ad705 DS-3851 code cleanup: community feedback 2019-02-24 15:29:23 +01:00
Andrea Bollini
4858aa75d2 DS-3851 fix bugs discovered by ITs 2019-02-24 14:22:10 +01:00
Andrea Bollini
dbddc2f06a DS-3851 add ITs for the task repositories endpoints 2019-02-24 14:21:16 +01:00
Andrea Bollini
ea2ce38677 max upload must be set in the application.properties 2019-02-23 22:33:00 +01:00
Tim Donohue
cc21394276 Merge pull request #2287 from atmire/DS-4107_Metadata_as_map
DS-4107 Represent DSO metadata as a map in REST
2019-02-21 14:17:30 -06:00
Andrea Bollini
8c0a94e689 DS-3851 add ITs for the workflowitems endpoint 2019-02-21 15:52:10 +01:00
Andrea Bollini
3c8186e7c0 typo in the comment 2019-02-21 15:11:06 +01:00
Andrea Bollini
3c9626a5ef DS-3851 force a predictable order to simplify testing 2019-02-21 15:10:23 +01:00
Andrea Bollini
9f44be85a3 Allow to configure max upload 2019-02-21 15:07:54 +01:00
Raf Ponsaerts
800f254677 Merge branch 'w2p-57159_permission-to-create-relations' of https://github.com/atmire/DSpace into w2p-57159_permission-to-create-relations 2019-02-21 09:21:52 +01:00
Raf Ponsaerts
65a94cdda7 Implemented an IT for the text/uri-list parsing 2019-02-21 09:21:11 +01:00
Andrea Bollini
70091e59b8 DS-3851 partially remove legacy workflow to simplify testing
First tests for the workflow endpoint
2019-02-20 23:11:04 +01:00
Terry Brady
ac83a4edef Merge pull request #2344 from terrywbrady/ds4126m
[DS-4126] master: Optimize docker builds
2019-02-20 13:16:01 -08:00
Chris Wilper
0609a6f0b7 DS-4107 Improve javadocs 2019-02-20 14:48:24 -05:00
Chris Wilper
60ea589296 DS-4107 Represent DSO metadata as a map in REST 2019-02-20 14:48:20 -05:00
Terry Brady
35ca48e17a Merge pull request #15 from mwoodiupui/DS-4167
[DS-4167] Fix various problems with Oracle script.
2019-02-20 09:02:11 -08:00
Terry Brady
653bfceac8 Merge branch 'ds4167' into DS-4167 2019-02-20 09:02:00 -08:00
Terry Brady
35e98f0d4b v1 oracle sql 2019-02-20 08:40:34 -08:00
Ben Bosman
1989cd9ff4 JavaDoc 2019-02-20 17:26:43 +01:00
Mark H. Wood
3302835650 [DS-4167] Fix various problems with Oracle script. 2019-02-20 11:23:21 -05:00
Raf Ponsaerts
91caed0cf6 Refactored tests 2019-02-20 14:39:32 +01:00
Raf Ponsaerts
a0ebd616f2 Processed feedback 2019-02-20 13:11:39 +01:00
Raf Ponsaerts
7f194dfa95 Applied feedback 2019-02-20 11:28:34 +01:00
Raf Ponsaerts
53580fa228 Merged w2p-57159_permission-to-create-relations into w2p-58898_place-column-calculation-error 2019-02-20 09:31:40 +01:00
Terry Brady
7773b46aca update sql 2019-02-19 10:16:21 -08:00
Terry Brady
89c1cc7729 remove /org 2019-02-19 07:34:56 -08:00
Terry Brady
f829b7df5a remove dspace/etc from build steps 2019-02-19 07:11:23 -08:00
Terry Brady
4ac06271c8 attempt to fix headers 2019-02-19 06:53:30 -08:00
Terry Brady
22cc5469a2 simplify string extract from stream 2019-02-19 06:30:25 -08:00
Terry Brady
5f47771e4e remove /etc directory 2019-02-19 06:26:28 -08:00
Terry Brady
e421a7c7c7 apply review feedback 2019-02-19 06:26:12 -08:00
Andrea Bollini
d3d3b57133 DS-3802 switch to the local LateObjectEvaluator 2019-02-19 08:48:39 +01:00
Andrea Bollini
c7c204091f Revert "Rely on Spring LateObjectEvaluator instead than our custom porting"
This reverts commit 821d106439.
2019-02-19 08:24:17 +01:00
Raf Ponsaerts
37c3ab9e17 Applied feedback 2019-02-18 09:23:39 +01:00
Terry Brady
b6f73682a3 Migrate postgres update-sequences.sql 2019-02-15 15:33:39 -08:00
Mark H. Wood
59bf702500 [DS-3695] Record string value of object ID in usage statistics. 2019-02-15 14:08:18 -05:00
Mark H. Wood
501ea5b6a6 Revert "[DS-3695] Add missing query defaults in a method I overlooked."
This reverts commit 6ecc3b2351.
2019-02-15 12:04:15 -05:00
Mark H. Wood
4b59ef27fc Revert "[DS-3695] Replace removed default schema settings with query parameters."
This reverts commit 69fcb0ebce.
2019-02-15 12:01:05 -05:00
Mark H. Wood
0194f557c0 [DS-3695] Re-introduce configured default match operator, since I found to where they moved it. Default search field was already configured. 2019-02-15 11:59:08 -05:00
Mark H. Wood
caf95d5acb Merge pull request #2347 from mwoodiupui/DS-4163-7
[DS-4163] OAI exception on empty list
2019-02-14 13:49:37 -05:00
ssolim
0b12168218 XOAI.java skip empty list and don't add it to solr 2019-02-14 13:24:46 -05:00
Tim Donohue
44a3111e41 Merge pull request #2291 from atmire/manage_metadata_registry
Manage metadata registry
2019-02-14 11:19:05 -06:00
Andrea Bollini
6dd560bd92 Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3851_workflow_new 2019-02-14 15:00:07 +01:00
Mark H. Wood
dd8a7c1fea [DS-3695] Remove commented elements and comments not about DSpace from solrconfig.xml. 2019-02-14 08:57:27 -05:00
Terry Brady
e6860e3724 Set solr.server to match docker config 2019-02-13 12:29:22 -08:00
Mark H. Wood
01b9707347 [DS-3695] Placate the style checker, and simplify a try block. 2019-02-13 14:39:11 -05:00
Mark H. Wood
8c971d9acd [DS-3695] Solr UUID field wants a *String* value. 2019-02-13 13:56:37 -05:00
Mark H. Wood
6ecc3b2351 [DS-3695] Add missing query defaults in a method I overlooked. 2019-02-13 13:54:53 -05:00
Mark H. Wood
cd2e6a7878 [DS-3695] Add license boilerplate to new Docker configuration 2019-02-13 12:41:12 -05:00
Mark H. Wood
589b719895 Merge pull request #14 from Georgetown-University-Libraries/mwoodiupui-DS-3695
Dockerfile support for external solr
2019-02-13 11:47:43 -05:00
Mark H. Wood
c1800427bd Merge branch 'DS-3695' into mwoodiupui-DS-3695 2019-02-13 11:39:03 -05:00
Terry Brady
cb3276b02e remove core.properties names 2019-02-13 08:28:33 -08:00
Mark H. Wood
69fcb0ebce [DS-3695] Replace removed default schema settings with query parameters. 2019-02-13 11:27:09 -05:00
Mark H. Wood
732bc7d92f [DS-3695] Tell developers where to find the example schema, for
comments and examples of what you can do here.
2019-02-13 11:27:09 -05:00
Mark H. Wood
e9296ac8a8 [DS-3695] All tests should start with an empty Solr. 2019-02-13 11:27:09 -05:00
Mark H. Wood
cd5911a68f [DS-3695] Add docValues to all *PointField by default to support faceting.
Some of these may be unnecessary, but I don't know on which we facet.
2019-02-13 11:27:09 -05:00
Mark H. Wood
ad50b73fd9 [DS-3695] Give the Solr admin. a clue about the purpose of each core. 2019-02-13 11:27:09 -05:00
Mark H. Wood
70b5d6bf4f [DS-3695] Reintroduce "ignored" fieldType, even though the field is
probably not used.
2019-02-13 11:27:09 -05:00
Mark H. Wood
0fa69a7f09 [DS-3695] Whoops, missed a few Trie*Field references. 2019-02-13 11:27:09 -05:00
Mark H. Wood
3bcb2edd1c [DS-3695] Remove redundant types, irrelevant attributes; tidy layout. 2019-02-13 11:27:09 -05:00
Mark H. Wood
094db7217a [DS-3695] Remove unused "fieldType"s, dusty old comments from stock
sample schema.  Tidy indentation, break very long elements into
multiple lines.
2019-02-13 11:27:09 -05:00
Mark H. Wood
687f04e269 [DS-3695] Rip out big handfuls of unused fieldtypes, commentary about
Solr not DSpace.  Break loooong tags into attribute-per-line format.
2019-02-13 11:27:09 -05:00
Mark H. Wood
0f83c7fe8b [DS-3695] We no longer control Solr's logging. 2019-02-13 11:27:09 -05:00
Mark H. Wood
17ffe721da [DS-3695] See bf4ead40575f0b180fd6840373ef17d98a6e778e. We *do* configure Solr for testing. 2019-02-13 11:27:09 -05:00
Mark H. Wood
bb350ca420 [DS-3695] Upgrade indexes all the way to 7_x. 2019-02-13 11:27:09 -05:00
Mark H. Wood
919ce45338 [DS-3695] We no longer configure Solr itself. 2019-02-13 11:27:09 -05:00
Mark H. Wood
da9d5c7452 [DS-3695] Remaining minimal changes to make all cores load in Solr 7. 2019-02-13 11:27:09 -05:00
Mark H. Wood
bce42a47f6 [DS-3695] Cure failing IT: the test was wrong. 2019-02-13 11:27:09 -05:00
Mark H. Wood
5db8a6a889 [DS-3695] Make 'search' core load in stock Solr 7.2.1.
This should work without altering Solr, across Solr releases, as long
as Solr ships the necessary additional analyzers in /contrib.
2019-02-13 11:27:09 -05:00
Mark H. Wood
38a88d8afb [DS-3695] Start ripping out Solr server. 2019-02-13 11:27:09 -05:00
Mark H. Wood
975ebb0b5b [DS-3695] Document what I puzzled out ot MockSolrServer, and small cleanups. 2019-02-13 11:27:09 -05:00
Tim Donohue
e928b94d07 Fix Solr startup errors by downgrading to 7.3.1 2019-02-13 11:27:09 -05:00
Mark H. Wood
63c43eab15 [DS-3695] Exclude Jetty from solr-core and solr-cell: Solr and Spring
Boot are fighting over versions.
2019-02-13 11:27:09 -05:00
Mark H. Wood
070c21a113 [DS-3695] Switch new class from SolrServer to SolrClient. 2019-02-13 11:27:09 -05:00
Mark H. Wood
e210e3ca35 [DS-3695] Complete botched conflict fixup. 2019-02-13 11:27:09 -05:00
Tim Donohue
0c7f3c0e90 Update to Solr 7.5. Sync dependencies and cleanup spring-rest POM 2019-02-13 11:27:09 -05:00
Tim Donohue
8323ccd051 Disable Solr autoconfiguration in Spring Boot. Minor config cleanup 2019-02-13 11:27:09 -05:00
Tim Donohue
205db5c6f8 Revert Spring Boot updates until DS-3802 is solved. Solr core only for testing. 2019-02-13 11:27:09 -05:00
Tim Donohue
174a77cde0 Fix minor compilation errors in OAI 2019-02-13 11:27:09 -05:00
Mark H. Wood
85c09cbb55 [DS-3695] Start work on dspace-spring-rest 2019-02-13 11:27:09 -05:00
Mark H. Wood
f09287c56b [DS-3695] Rip out lots of Solr config. that is no longer defined in v7. 2019-02-13 11:27:09 -05:00
Mark H. Wood
8a76b77e66 [DS-3695] Upgrade Solr *client* to 7.3.0. 2019-02-13 11:27:09 -05:00
Tim Donohue
db707750fe Merge pull request #2333 from kshepherd/DS-4136_oai_import_master
[DS-4136] Master port: Improve OAI import performance for a large install
2019-02-13 09:15:32 -06:00
Raf Ponsaerts
d450b59899 Applied feedback and added tests 2019-02-13 13:08:30 +01:00
Terry Brady
7115173d61 Support Docker testing of externalized solr 2019-02-12 18:46:02 -08:00
Kim Shepherd
7cde38d229 [DS-4136] Tidy up comments, only display per-1k msg if batch size !1000 2019-02-13 13:03:37 +13:00
Terry Brady
ed41d852b1 Merge branch 'DS-3695' of https://github.com/mwoodiupui/DSpace into mwoodiupui-DS-3695 2019-02-12 14:50:33 -08:00
Mark H. Wood
8c222b9826 [DS-3695] All tests should start with an empty Solr. 2019-02-11 10:59:07 -05:00
Mark H. Wood
6254cedd21 [DS-3695] Add docValues to all *PointField by default to support faceting.
Some of these may be unnecessary, but I don't know on which we facet.
2019-02-08 14:01:16 -05:00
Mark H. Wood
0fc979a2ad [DS-3695] Give the Solr admin. a clue about the purpose of each core. 2019-02-08 11:09:09 -05:00
Raf Ponsaerts
93d982dfc8 Applied feedback and fixed test cases 2019-02-08 09:37:03 +01:00
Tim Donohue
e517ba49a5 English grammar corrections 2019-02-08 09:36:29 +01:00
Tim Donohue
d7c5e224e1 More minor corrections to English 2019-02-08 09:36:29 +01:00
Tim Donohue
d0db6e533a Minor improvements to English in error messages. 2019-02-08 09:36:29 +01:00
Samuel
34127e4e0f Added support for the CRUD operations on the MetadataField and MetadataSchema REST endpoints - feedback 2019-02-08 09:36:29 +01:00
Samuel
0a7a8aab8e Added support for the CRUD operations on the MetadataField and MetadataSchema REST endpoints - feedback 2019-02-08 09:36:28 +01:00
Samuel
42942978a8 Added support for the CRUD operations on the MetadataField and MetadataSchema REST endpoints 2019-02-08 09:36:02 +01:00
Mark H. Wood
be453a0e55 [DS-3695] Reintroduce "ignored" fieldType, even though the field is
probably not used.
2019-02-07 16:23:27 -05:00
Mark H. Wood
77328c388d [DS-3695] Whoops, missed a few Trie*Field references. 2019-02-07 15:44:34 -05:00
Mark H. Wood
13c0b9b227 [DS-3695] Remove redundant types, irrelevant attributes; tidy layout. 2019-02-07 15:33:29 -05:00
Tim Donohue
211545a9d0 Merge pull request #2290 from atmire/rest_item_crud
Added support for the CRUD operations on the Item rest endpoint
2019-02-07 13:43:44 -06:00
Tim Donohue
2b1b10100a Merge pull request #2343 from atmire/DS-4164_error-handling-bug-in-open-search-controller
[DS-4164] fixed the bug in the error handling
2019-02-07 12:03:27 -06:00
Mark H. Wood
416732921c [DS-3695] Remove unused "fieldType"s, dusty old comments from stock
sample schema.  Tidy indentation, break very long elements into
multiple lines.
2019-02-07 09:48:08 -05:00
Raf Ponsaerts
aa4125c9e9 Removed the name check from the DspaceObjectRestEqualityUtils 2019-02-07 15:39:15 +01:00
Ben Bosman
15eb85eb08 Check for null values
Use typed Maps
2019-02-07 12:55:02 +01:00
Terry Brady
d90e60a92b Sync docker solr web.xml with web.xml 2019-02-06 15:28:53 -08:00
Terry Brady
d489636641 Migrate PR2307 2019-02-06 10:24:35 -08:00
Raf Ponsaerts
78f7066c94 [DS-4164] fixed the bug in the error handling 2019-02-06 16:19:10 +01:00
Ben Bosman
f2b3f7027c Support relationships using a uri-list: merge 2019-02-06 16:12:47 +01:00
Mark H. Wood
8fb1ac5bd6 [DS-3695] Rip out big handfuls of unused fieldtypes, commentary about
Solr not DSpace.  Break loooong tags into attribute-per-line format.
2019-02-06 09:45:14 -05:00
Ben Bosman
fb2e0d556e Merge branch 'w2p-57159_permission-to-create-relations' into w2p-58898_place-column-calculation-error
# Conflicts:
#	dspace-spring-rest/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java
#	dspace-spring-rest/src/main/java/org/dspace/app/rest/repository/RelationshipRestRepository.java
#	dspace-spring-rest/src/test/java/org/dspace/app/rest/RelationshipRestRepositoryIT.java
2019-02-06 14:47:19 +01:00
Kevin Van de Velde
05cb949655 Small changes in how exceptions are handled in the ItemRestRepository, DSpaceRestRepository classes 2019-02-06 14:21:37 +01:00
Raf Ponsaerts
47d7021350 Added owningCollection as parameter to the POST Item endpoint and removed this from ItemRest. Also added documentation for several public methods. 2019-02-06 14:21:36 +01:00
Kevin Van de Velde
02ff91259f [DS-4108] Rest api item CRUD: Updating item metadata 2019-02-06 14:20:21 +01:00
Ben Bosman
864f6f1237 Support relationships using a uri-list 2019-02-06 14:19:12 +01:00
Raf Ponsaerts
db8f6f1bb8 Fixed the 500 internal server error when calling item delete endpoint with invalid UUID 2019-02-06 14:19:10 +01:00
Raf Ponsaerts
2e446bf9a3 Added admin access to item delete and wrote tests 2019-02-06 14:19:10 +01:00
Raf Ponsaerts
ac283df61d Implemented the create method in the ItemRestRepository 2019-02-06 14:19:10 +01:00
Mark H. Wood
241fdbb538 [DS-3695] We no longer control Solr's logging. 2019-02-04 15:15:48 -05:00
Mark H. Wood
cb17cb1b7f [DS-3695] See bf4ead40575f0b180fd6840373ef17d98a6e778e. We *do* configure Solr for testing. 2019-02-04 14:58:43 -05:00
Mark H. Wood
8c26a9a78e [DS-3695] Upgrade indexes all the way to 7_x. 2019-02-04 14:58:43 -05:00
Mark H. Wood
ba0edff464 [DS-3695] We no longer configure Solr itself. 2019-02-04 14:58:43 -05:00
Mark H. Wood
f2cfab2d8d [DS-3695] Remaining minimal changes to make all cores load in Solr 7. 2019-02-04 14:58:43 -05:00
Mark H. Wood
fa9cc7c958 [DS-3695] Cure failing IT: the test was wrong. 2019-02-04 14:58:43 -05:00
Mark H. Wood
c375fe1762 [DS-3695] Make 'search' core load in stock Solr 7.2.1.
This should work without altering Solr, across Solr releases, as long
as Solr ships the necessary additional analyzers in /contrib.
2019-02-04 14:58:43 -05:00
Mark H. Wood
0792ba77ed [DS-3695] Start ripping out Solr server. 2019-02-04 14:58:43 -05:00
Mark H. Wood
3e8e1aa60f [DS-3695] Document what I puzzled out of MockSolrServer, and small cleanups. 2019-02-04 14:58:43 -05:00
Tim Donohue
cc38ec872a Fix Solr startup errors by downgrading to 7.3.1 2019-02-04 14:58:43 -05:00
Mark H. Wood
9d0483c146 [DS-3695] Exclude Jetty from solr-core and solr-cell: Solr and Spring
Boot are fighting over versions.
2019-02-04 14:58:43 -05:00
Mark H. Wood
8d5de13987 [DS-3695] Switch new class from SolrServer to SolrClient. 2019-02-04 14:58:43 -05:00
Mark H. Wood
ee3b60c45e [DS-3695] Complete botched conflict fixup. 2019-02-04 14:58:43 -05:00
Tim Donohue
3504397031 Update to Solr 7.5. Sync dependencies and cleanup spring-rest POM 2019-02-04 14:58:43 -05:00
Tim Donohue
5e78f408f3 Disable Solr autoconfiguration in Spring Boot. Minor config cleanup 2019-02-04 14:58:43 -05:00
Tim Donohue
ece64480a0 Revert Spring Boot updates until DS-3802 is solved. Solr core only for testing. 2019-02-04 14:58:43 -05:00
Tim Donohue
01b80025b7 Fix minor compilation errors in OAI 2019-02-04 14:58:43 -05:00
Mark H. Wood
32a3c74164 [DS-3695] Start work on dspace-spring-rest 2019-02-04 14:58:43 -05:00
Mark H. Wood
3aa6b89487 [DS-3695] Rip out lots of Solr config. that is no longer defined in v7. 2019-02-04 14:58:43 -05:00
Mark H. Wood
7b9bd509a2 [DS-3695] Upgrade Solr *client* to 7.3.0. 2019-02-04 14:58:43 -05:00
Tim Donohue
3e5c9426aa Merge pull request #2277 from atmire/rest_comm_coll_management
Added support for the CRUD operations on the Collection and Community…
2019-02-04 09:13:03 -06:00
Raf Ponsaerts
2db2b70e82 Altered permissions on the Collection and Community Repositories and added ITs 2019-02-04 10:11:52 +01:00
Andrea Bollini
8c35296cdf Merge pull request #2337 from Georgetown-University-Libraries/ds4150
[DS-4150] Ensure dspace.restUrl is supported out of the box for DSpace 7 Docker
2019-01-31 16:57:50 +01:00
kshepherd
ed4f63c0b4 Merge pull request #2338 from DSpace/README-running-tests
Add Notes on running Tests to README
2019-01-31 11:00:42 +13:00
Tim Donohue
1957f36988 Notes on running tests 2019-01-30 15:24:37 -06:00
Ben Bosman
855b3f9556 getValues should not return null values 2019-01-29 16:22:26 +01:00
Ben Bosman
e99252c16f Merge branch 'w2p-57159_permission-to-create-relations' into w2p-58898_place-column-calculation-error 2019-01-29 14:52:05 +01:00
Ben Bosman
0a91360061 Merge remote-tracking branch 'community/configurable_entities' into w2p-57159_permission-to-create-relations
# Conflicts:
#	dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java
2019-01-29 14:31:15 +01:00
benbosman
c72d731f93 Merge pull request #2302 from atmire/w2p-57441_modified-behavior-retrieving-relations
Retrieving relations per relationship type
2019-01-29 14:25:35 +01:00
benbosman
a663f05ad5 Merge pull request #2324 from atmire/discovery-config-relations
Discovery config relations
2019-01-29 14:20:53 +01:00
Ben Bosman
e01e133765 wording 2019-01-29 13:07:49 +01:00
Ben Bosman
887a0999bd wording 2019-01-29 12:20:49 +01:00
Ben Bosman
2dc03adc58 better readable code 2019-01-29 11:11:48 +01:00
Ben Bosman
96f08e4414 Merge branch 'w2p-59343_support-ordered-metadata' into w2p-58898_place-column-calculation-error
# Conflicts:
#	dspace-spring-rest/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java
2019-01-29 10:41:05 +01:00
Ben Bosman
765e14b007 Verify order of virtual and real metadata 2019-01-29 10:39:55 +01:00
Ben Bosman
f8497ff392 JavaDoc 2019-01-29 10:26:25 +01:00
Ben Bosman
7abc5b01e2 Merge remote-tracking branch 'origin/w2p-57159_permission-to-create-relations' into w2p-58898_place-column-calculation-error
# Conflicts:
#	dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java
#	dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java
2019-01-29 09:58:22 +01:00
Terry Brady
da1346ef53 Match dspace.restUrl to module name 2019-01-28 10:05:48 -08:00
Raf Ponsaerts
7dc59a03a2 Fixed nullpointer issue in the comparator 2019-01-28 15:35:56 +01:00
Raf Ponsaerts
a62a66ec41 removed the ItemService from the DspaceObjectService 2019-01-28 14:10:36 +01:00
Raf Ponsaerts
b908df263c [Task 59343] sorted the list of metadatavalues and forced the itemconverter to use the itemservice 2019-01-28 13:24:35 +01:00
Raf Ponsaerts
7d961db6ce Applied feedback 2019-01-28 11:49:19 +01:00
benbosman
54a626892a indentation 2019-01-25 09:10:19 +01:00
Ben Bosman
e768866292 Merge branch 'w2p-58898_place-column-calculation-error' into w2p-59343_support-ordered-metadata 2019-01-24 15:22:13 +01:00
Ben Bosman
d7610ccf6e Resolving merge conflict 2019-01-24 14:19:09 +01:00
Ben Bosman
1311c40cb7 Merge remote-tracking branch 'origin/w2p-57159_permission-to-create-relations' into w2p-58898_place-column-calculation-error
# Conflicts:
#	dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java
#	dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java
#	dspace-api/src/main/java/org/dspace/content/service/ItemService.java
#	dspace-api/src/main/java/org/dspace/content/virtual/Collected.java
#	dspace-api/src/main/java/org/dspace/content/virtual/UUIDValue.java
#	dspace-spring-rest/src/test/java/org/dspace/app/rest/RelationshipRestRepositoryIT.java
#	dspace/config/spring/api/core-services.xml
2019-01-24 13:59:36 +01:00
Ben Bosman
257b14bc54 Merge branch 'w2p-58898_place-column-calculation-error' into w2p-59343_support-ordered-metadata 2019-01-24 13:51:52 +01:00
Ben Bosman
dac5ca467c Merge branch 'configurable_entities_temp_2' into w2p-58898_place-column-calculation-error 2019-01-24 13:48:33 +01:00
Ben Bosman
816c7d0772 Merge branch 'configurable_entities_comm' into w2p-58898_place-column-calculation-error 2019-01-24 13:34:16 +01:00
Ben Bosman
391c5b4dc6 Sort metadata values part 1 2019-01-24 13:33:13 +01:00
Ben Bosman
51de4cd87c JavaDocs and boolean name update 2019-01-24 13:26:35 +01:00
Raf Ponsaerts
1ed01310b0 Implemented the additional getMetadata function with the boolean 2019-01-24 10:06:42 +01:00
Ben Bosman
8557f44673 Merge branch 'configurable_entities_temp_2' into w2p-57159_permission-to-create-relations 2019-01-23 12:03:54 +01:00
Ben Bosman
025c77ee70 authorization for updating the place of both items 2019-01-23 12:01:50 +01:00
Ben Bosman
1ac514d4fb Merge branch 'configurable_entities_temp_2' into discovery-config-relations 2019-01-23 09:04:32 +01:00
benbosman
ccaca1aaaa indentation fix 2019-01-22 17:32:31 +01:00
Ben Bosman
a52b850080 temp disable ITs 2019-01-22 16:24:29 +01:00
Raf Ponsaerts
4847620a4c Fixed issues and wrote IT tests 2019-01-22 14:58:43 +01:00
Raf Ponsaerts
7e14562139 Merged configurable_entities into w2p-57159_permission-to-create-relations 2019-01-22 12:50:44 +01:00
Ben Bosman
461b5ee21a typing error 2019-01-22 11:15:28 +01:00
Raf Ponsaerts
86f3f9e9c3 Fixed tests and place calculation 2019-01-22 09:53:52 +01:00
Tim Donohue
f387ab18c0 Merge remote-tracking branch 'upstream/master' into configurable_entities 2019-01-21 20:02:17 +00:00
Raf Ponsaerts
f2c2b768f7 Fixed test failure 2019-01-21 10:14:09 +01:00
Raf Ponsaerts
bfc1b39ea2 Merged configurable_entities into w2p-58898_place-column-calculation-error 2019-01-21 09:19:01 +01:00
Kim Shepherd
929312d7e8 [DS-4136] tidy up commented out (old) configurationService instantiation in main() 2019-01-19 10:40:21 +13:00
Kim Shepherd
6d40a57df7 [DS-4136] Apply same changes as dspace 6.x version (PR #2320); replace xoai services configurationservice with proper DSpace configuration service; and replace all instances of legacy configuration manager with proper DSpace configuration service 2019-01-19 10:33:00 +13:00
Kim Shepherd
e7f39c23b2 WIP commit for oai master port 2019-01-18 07:30:47 +13:00
Giuseppe Digilio
48d90660ab Fixes after check on angular app 2019-01-17 17:15:04 +01:00
Raf Ponsaerts
0f6148db77 Fixed RunTimeException messages and addressed nullpointer error 2019-01-16 09:39:14 +01:00
Raf Ponsaerts
ae96aeafd4 Applied community feedback, javadocs 2019-01-15 15:49:42 +01:00
Ben Bosman
269fdd2852 additional filters in default configuration 2019-01-15 12:44:29 +01:00
Raf Ponsaerts
a757a6661e Fixed the relationship place ordering and MetadataValue place ordering 2019-01-15 11:51:48 +01:00
Ben Bosman
6ee687a4ae metadata attributes place, authority, confidence in rest 2019-01-15 08:50:52 +01:00
kshepherd
07d6846234 Merge pull request #2327 from J4bbi/ds4142_master
[DS-4142] master: update maven jdks for docker
2019-01-15 09:54:23 +13:00
Hrafn Malmquist
9b6e97fd24 update maven jdks for docker 2019-01-14 17:28:32 +00:00
Raf Ponsaerts
865b4256b9 Made sure that the itemService.update is called so that the metadata and relationship places get updated upon creation/deletion of a relationship 2019-01-14 15:53:24 +01:00
Raf Ponsaerts
7f225f1ad5 Fixed the place attribute not being set to the virtual metadata 2019-01-14 15:29:54 +01:00
Ben Bosman
6d40600dbb Merge branch 'configurable_entities' into discovery-config-relations
# Conflicts:
#	dspace/config/spring/api/discovery.xml
2019-01-14 14:23:34 +01:00
kshepherd
04d11924a5 Merge pull request #2323 from kshepherd/DS-4104_google_scholar_date_crosswalk_master
[DS-4104] Apply Google Scholar date crosswalk fix to master
2019-01-14 11:33:53 +13:00
Kim Shepherd
74fc3e97bb [DS-4104] Apply Google Scholar date crosswalk fix to master, porting from PR#2294 dspace 6 version 2019-01-14 09:20:43 +13:00
kshepherd
51550079c4 Merge pull request #2311 from mwoodiupui/DS-4087
[DS-4087] 'dspace structure-builder' errors are too hard to interpret
2019-01-13 13:22:13 +13:00
Raf Ponsaerts
3d011c5411 Applied fixes to catch clauses and added more docs 2019-01-11 15:01:59 +01:00
Raf Ponsaerts
b96cf91ef8 Applied community feedback and fixed tests 2019-01-11 09:38:21 +01:00
Raf Ponsaerts
612c8e315c Made the PUT and POST relationship endpoints now use relationshipType parameter instead of the relationshipType in the body 2019-01-07 10:54:17 +01:00
Terry Brady
6e02e145f0 Merge pull request #2314 from tdonohue/DS-4129
DS-4129: Remove unnecessary HarvestConsumer
2019-01-03 16:04:17 -08:00
Tim Donohue
33eedeaba8 DS-4129: Remove unnecessary HarvestConsumer 2019-01-03 16:13:05 +00:00
kshepherd
cd5d1b038d Merge pull request #2298 from kshepherd/DS-4113_clean_plugin
[DS-4113] Configure clean plugin in parent pom to also clean sub-modules
2019-01-03 18:09:55 +13:00
Andrea Bollini
67602275bd Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3851_workflow 2019-01-02 13:57:26 +01:00
Andrea Bollini
821d106439 Rely on Spring LateObjectEvaluator instead than our custom porting 2019-01-02 13:57:14 +01:00
Andrea Bollini
bf0139d5e9 Disable IT for basic workflow 2019-01-02 12:24:34 +01:00
Andrea Bollini
8a2ae7e891 Add javadocs: community feedback from https://github.com/4Science/DSpace/pull/37 2019-01-02 12:13:56 +01:00
Andrea Bollini
b3b1c8786e Fix H2 sql for xml workflow 2019-01-01 18:04:35 +01:00
Andrea Bollini
f84e951106 Add support for xmlworkflow in H2 database (Oracle sql as is) 2019-01-01 17:09:04 +01:00
Andrea Bollini
69f7c7588d Enable configuration workflow by default 2019-01-01 17:08:17 +01:00
Andrea Bollini
c8e11e6700 Merge remote-tracking branch 'origin/master' into DS-3851_workflow 2019-01-01 13:00:11 +01:00
Mark H. Wood
7b2b923390 [DS-4087] Small improvements to message texts. 2018-12-31 09:31:07 -05:00
Mark H. Wood
f255521c8e [DS-4087] First pass at better error reporting. 2018-12-30 18:37:00 -05:00
Andrea Bollini
1a7b22a784 Merge pull request #2293 from tdonohue/REST-IT-refactor
Super Speedy REST API v7 Integration Tests
2018-12-25 16:48:18 +01:00
benbosman
0692701f7a Merge pull request #2270 from atmire/virtual-metadata-update
Entities Virtual metadata update
2018-12-21 12:49:05 +01:00
Ben Bosman
c9bbbfde1f merge conflict 2018-12-20 11:26:05 +01:00
Ben Bosman
37fc3ee409 Merge remote-tracking branch 'community/configurable_entities' into w2p-57441_modified-behavior-retrieving-relations
# Conflicts:
#	dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java
#	dspace-api/src/main/java/org/dspace/content/dao/RelationshipTypeDAO.java
#	dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipTypeDAOImpl.java
#	dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java
#	dspace-api/src/main/java/org/dspace/content/service/RelationshipTypeService.java
2018-12-20 11:10:06 +01:00
Tim Donohue
f3f784c6ec Merge pull request #2269 from atmire/w2p-57712_merging-csv-functionality
Entities csv import fixes for creating relations
2018-12-19 14:05:56 -06:00
Samuel
fb1a8b8c33 Added support for the CRUD operations on the Collection and Community REST endpoints - remove owningCommunity field from CommunityRest and CollectionRest 2018-12-19 14:05:22 +01:00
Tim Donohue
035832cfdd Merge pull request #2288 from atmire/w2p-57718_relationships-link-to-items
[DS-4084] - Relationships should link to Items via REST API
2018-12-18 14:18:35 -06:00
Samuel
e283f43467 Added support for the CRUD operations on the Collection and Community REST endpoints - send parentCommunity as request parameter 2018-12-18 11:09:12 +01:00
Ben Bosman
32c9585ede checkstyle 2018-12-18 10:55:22 +01:00
Ben Bosman
0d678aca58 JavaDoc 2018-12-18 10:35:30 +01:00
Ben Bosman
fe437ad88d checkstyle 2018-12-18 10:29:47 +01:00
Ben Bosman
dc6bd487e0 checkstyle 2018-12-18 10:29:04 +01:00
Ben Bosman
b050db1cae JavaDoc updates 2018-12-18 10:17:12 +01:00
Ben Bosman
efd579f054 typing error 2018-12-18 09:58:57 +01:00
Ben Bosman
1b0f00df18 Comment on relationship.type 2018-12-18 09:42:48 +01:00
Tim Donohue
dfd3ba4e38 Ensure BitstreamContent tests start with an empty Solr Statistics core 2018-12-17 21:04:56 +00:00
Tim Donohue
78e3e00e7f Rename OpenSearch tests to end in "IT" since they are Integration Tests. 2018-12-17 21:04:15 +00:00
Tim Donohue
7fe7da4813 Fix OpenSearch test issues by ensure OpenSearchServiceImpl doesn't cache configs & that Tests specify the config setting they expect. 2018-12-17 20:40:46 +00:00
Tim Donohue
74aa295787 Refactor MockSolrLoggerServiceImpl to initialize FakeDatabaseReader before every method 2018-12-17 18:44:27 +00:00
Tim Donohue
e0175bd198 Improve logging messages of SolrLogger service/listener. Debugging is otherwise impossible 2018-12-17 18:44:27 +00:00
Tim Donohue
f0ff442959 Stop using MockSolrServer directly and use the Solr Statistics Service 2018-12-17 18:44:27 +00:00
Tim Donohue
b05eec3b92 Cache default EPersons (for reuse). Fix EPersonRestRepository tests to not rely on default EPersons as heavily. 2018-12-17 18:44:27 +00:00
Tim Donohue
c305879b51 Remove @DirtiesContext and allow reuse of Spring ApplicationContext & DSpace Kernel 2018-12-17 18:44:27 +00:00
Tim Donohue
c2803a7f69 Ensure full stacktrace is logged for Integration Tests 2018-12-17 18:44:27 +00:00
Tim Donohue
ff1f46aedb Externalize DSpaceKernelInitializer so that it can be used by Integration Tests to start Kernel 2018-12-17 18:44:27 +00:00
Tim Donohue
942f2e7b92 Remove unnecessary (default) annotations. Comment what each annotation does. 2018-12-17 18:44:27 +00:00
Ben Bosman
39bf016eb8 Merging configurable_entities branch 2018-12-17 13:45:04 +01:00
Ben Bosman
1776f892e0 Merge remote-tracking branch 'community/configurable_entities' into w2p-57441_modified-behavior-retrieving-relations 2018-12-17 12:18:43 +01:00
Ben Bosman
10c610bf73 Merge branch 'w2p-57712_merging-csv-functionality' into virtual-metadata-update 2018-12-17 10:08:25 +01:00
Ben Bosman
6fca857429 Merge remote-tracking branch 'community/configurable_entities' into w2p-57712_merging-csv-functionality 2018-12-17 09:55:59 +01:00
Ben Bosman
feaa5d1adb Merge remote-tracking branch 'community/configurable_entities' into w2p-57718_relationships-link-to-items 2018-12-17 09:39:52 +01:00
Tim Donohue
21fe950593 Merge pull request #2299 from tdonohue/remove_xmlui_spring_configs
Remove obsolete XMLUI spring configs (from master)
2018-12-14 10:41:35 -06:00
Raf Ponsaerts
4e51fd0c86 Added support for the leftItem and rightItem through the parameters 2018-12-14 17:20:45 +01:00
Tim Donohue
33268e8980 Remove obsolete XMLUI spring configs 2018-12-14 16:09:19 +00:00
Tim Donohue
c3f2fa7f47 Increase memory available to Unit/Integration Tests 2018-12-14 15:29:04 +00:00
Tim Donohue
04475e42eb Merge pull request #2286 from tdonohue/temp_memory_issue_fix
[Configurable Entities] Temporary fix for Integration Test memory issues
2018-12-14 09:27:24 -06:00
Raf Ponsaerts
240636e978 Created new IT and fixed checkstyle 2018-12-14 14:53:11 +01:00
Raf Ponsaerts
2f0fb32c8c Fixed typo 2018-12-14 13:27:29 +01:00
Raf Ponsaerts
e932075d03 Added documentation and @Override where applicable 2018-12-14 13:26:16 +01:00
Raf Ponsaerts
3123c1dd61 Wrote documentation for the spring beans 2018-12-14 09:54:26 +01:00
Andrea Bollini
43bfdbb3ff Merge pull request #2253 from tantz001/DS-4050
DS-4050 Search method returns 204 NoContent or 200 for empty page if no result.
2018-12-13 22:54:01 +01:00
Andrea Bollini
c4ab55f6f5 Merge pull request #2128 from tubhh/dspace7_opensearch
DSpace7 opensearch
2018-12-13 22:48:35 +01:00
Kim Shepherd
20904f897d [DS-4113] Configure clean plugin in parent pom to also clean sub-modules 2018-12-14 10:17:11 +13:00
Raf Ponsaerts
d95c378edc Implemented the feedback 2018-12-13 15:45:15 +01:00
Oliver Goldschmidt
67046598c1 fixes typo 2018-12-13 09:34:40 +01:00
Oliver Goldschmidt
8ccb41a205 Merge branch 'master' of https://github.com/DSpace/DSpace into dspace7_opensearch 2018-12-13 09:33:46 +01:00
Andrea Bollini
7ccd99c97c Merge pull request #2173 from ppmdo/DS-3904
DS-3904 - Add support for startsWith parameters in the browse endpoint
2018-12-12 22:52:44 +01:00
Raf Ponsaerts
314e96a6ac Merged the merging-csv-functionality branch into virtual-metadata-update 2018-12-12 10:54:16 +01:00
Tim Donohue
1b2cc563d7 Increase memory available to Unit/Integration Tests 2018-12-11 15:32:11 +00:00
Raf Ponsaerts
4171729f84 Wrote IT test for CSV import and fixed a small error 2018-12-11 09:30:13 +01:00
Raf Ponsaerts
8024242fd1 Added @Override to applicable methods 2018-12-10 09:56:43 +01:00
Tim Donohue
0feb1748a0 Merge pull request #2284 from AlexanderS/ds3914-master
DS-3914: Fix community defiliation (master)
2018-12-07 09:37:06 -06:00
Raf Ponsaerts
6e16a3ed62 Applied community feedback 2018-12-07 16:09:29 +01:00
Alexander Sulfrian
e02174bad8 CommunityFiliator: Some cleanup
This removes the loops for checking if a community is contained in a list of
communities. Community.equals() does the same check, so we simply can use
contains().
2018-12-07 11:22:01 +01:00
Alexander Sulfrian
5ecc473311 DS-3914: Fix community defiliation
This fixes in issue in the defiliate method of the community filiator. The
child and parent relations should be managed using the provided methods of the
Community.

This changes the visibility of Community.removeSubCommunity() to public, but
Community.removeParentCommunity() was public before already.
2018-12-07 11:19:06 +01:00
Raf Ponsaerts
7d07a1e6f9 Merged branch configurable_entities into w2p-57712_merging-csv-functionality 2018-12-07 10:38:19 +01:00
Tim Donohue
09faec3f17 Update Entities code to use log4j2 and commons-lang v3 (per master merge) 2018-12-06 17:43:15 +00:00
Tim Donohue
ceed829800 Merge branch 'master' into configurable_entities 2018-12-06 17:05:31 +00:00
Mark H. Wood
8df33db457 [DS-3989] Fix code from DS-3990 that was (expectedly) broken by DS-3989. 2018-12-05 16:13:24 -05:00
Mark H. Wood
844ef85583 [DS-3989] Placate Checkstyle. 2018-12-05 15:41:37 -05:00
Mark H. Wood
1d3a58958e [DS-3989] Update new method for Commons Configuration v2. 2018-12-05 15:41:37 -05:00
Mark H. Wood
1e5bcd47ce [DS-3989] Clarify help for --reporter option. 2018-12-05 15:41:37 -05:00
Mark H. Wood
c795c8b4b4 [DS-3989] Clarify submission task configuration. 2018-12-05 15:41:37 -05:00
Mark H. Wood
85db4fe2bb [DS-3989] Commit testing task. 2018-12-05 15:41:37 -05:00
Mark H. Wood
6084f892b2 [DS-3989] Remove unworkable task-id logging; ensure closure of Reporter. 2018-12-05 15:41:37 -05:00
Mark H. Wood
ff8a5fac97 [DS-3989] Add configuration for the Reporter plugin. 2018-12-05 15:41:37 -05:00
Mark H. Wood
dbcb1f291c [DS-3989] Fix invalid date format. 2018-12-05 15:41:37 -05:00
Mark H. Wood
d9c80d8afc [DS-3989] Fix checkstyle issues. 2018-12-05 15:41:37 -05:00
Mark H. Wood
9f64b9aa95 [DS-3989] Fix license blocks for new classes. 2018-12-05 15:41:37 -05:00
Mark H. Wood
de33ece1c9 [DS-3989] Pull report writing out through a pluggable interface, provide log and file plugins 2018-12-05 15:41:37 -05:00
Mark H. Wood
ab224b2508 [DS-3989] Integration test for Curator's reporting. 2018-12-05 15:41:37 -05:00
Mark H. Wood
1478ab36fa [DS-3989] Let tests set multiple values on a configuration property. 2018-12-05 15:41:37 -05:00
Mark H. Wood
d6d0e67017 [DS-3989] Placate Checkstyle: unused import. 2018-12-05 15:41:37 -05:00
Mark H. Wood
f23fef032f [DS-3989] Fix the date format. 2018-12-05 15:41:37 -05:00
Mark H. Wood
94ed795d00 [DS-3989] Accumulate reports; implement arbitrary output paths, not just standard output. 2018-12-05 15:41:37 -05:00
Mark H. Wood
301f804d44 Merge pull request #2279 from mwoodiupui/DS-4094
[DS-4094] DiscoveryRestController and DiscoveryRestRepository log as ScopeResolver
2018-12-05 15:23:58 -05:00
Mark H. Wood
90a9df75ef [DS-4094] Use correct logging categories; fix incorrect argument in a couple of messages. 2018-12-04 16:11:44 -05:00
Ivan Masár
6bbcc08e59 fix checkstyle broken by previous commit (sorry) 2018-12-03 22:44:36 +01:00
Santiago Tettamanti
733ef18184 DS-4019 Added a check for null when the group of a policy is obtained in getMostRecentModificationDate() and willChangeStatus() methods from XOAI.java class 2018-12-03 22:03:50 +01:00
Raf Ponsaerts
d77dd7fa4f Added support for the CRUD operations on the Collection and Community REST endpoints 2018-12-03 15:40:42 +01:00
Mark H. Wood
495ccdee21 Merge pull request #2168 from mwoodiupui/DS-3980
[DS-3980] Defend against bad MIME type in Email.addAttachment
2018-11-30 11:53:25 -05:00
Raf Ponsaerts
4c1fdfc49c Added a license header 2018-11-29 15:45:32 +01:00
Raf Ponsaerts
dcf21c04cf [Task 57718] added the leftItem and rightItem links to the relationship endpoint return 2018-11-29 15:25:48 +01:00
Raf Ponsaerts
699fb42683 [Task 57441] fixed the label vs id conflicting issue 2018-11-29 13:56:38 +01:00
Mark H. Wood
769398d6fb Merge pull request #2181 from mwoodiupui/DS-3990
[DS-3990] Curation task runs need per-run parameters.
2018-11-28 11:52:33 -05:00
Raf Ponsaerts
8f81f28607 [Task 57160] implemented CRUD methods on relationship endpoint 2018-11-28 14:17:11 +01:00
Tim Donohue
9bc1f1ce69 Merge pull request #2274 from DSpace/elasticsearch_cleanup
DS-3455: Delete elasticsearch-statistics-mapping.json (ElasticSearch was removed)
2018-11-27 07:58:57 -08:00
Raf Ponsaerts
13fea6312e Cleaned up and fix license headers 2018-11-27 09:37:17 +01:00
Raf Ponsaerts
41ff37de1b [Task 57441] fixed pagination on the /label endpoint 2018-11-27 08:33:10 +01:00
Tim Donohue
58c01c500c Delete elasticsearch-statistics-mapping.json 2018-11-26 16:30:46 -06:00
Tim Donohue
1d9a4edcc0 Merge pull request #2266 from mwoodiupui/DS-4077
[DS-4077] filter-media error on WordFilter -- remove old, failing WordFilter
2018-11-26 09:24:48 -08:00
Tim Donohue
b44cf37d9d Merge pull request #2267 from atmire/w2p-57442_relation-enum-metadataschema
DS-4083 Move all Schema constants to a new MetadataSchemaEnum class
2018-11-26 09:03:21 -08:00
Raf Ponsaerts
c6dfae90de intermediary commit 2018-11-26 09:02:30 +01:00
Raf Ponsaerts
925b83bc02 [Task 57441] Implemented the requested endpoints, except for one todo 2018-11-22 16:05:27 +01:00
Raf Ponsaerts
fc8e3744b6 Merged w2p-56183_additional-relationshiptype-endpoints into w2p-57441_modified-behavior-retrieving-relations and fixed checkstyle, license headers 2018-11-22 13:41:06 +01:00
Raf Ponsaerts
0b4dc28dfa Merge branch 'w2p-56182_additional-endpoint-relations' into w2p-57441_modified-behavior-retrieving-relations 2018-11-22 13:10:27 +01:00
Raf Ponsaerts
e7b6aded69 Fixed tests and added license headers 2018-11-22 10:41:45 +01:00
Pablo Prieto
30f8b61ad0 Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3904 2018-11-21 21:24:39 -06:00
Pablo Prieto
6600839fa4 Fixed ITs 2018-11-21 21:23:00 -06:00
Pablo Prieto
8ebb6a1a41 Added handling of empty querystring. Added startsWithandPage test. 2018-11-21 18:58:09 -06:00
Mark H. Wood
7613a204ed [DS-3990] Trim parameter name, value; remove unused test code. 2018-11-21 11:10:42 -05:00
Raf Ponsaerts
912a1f0be6 Merged entities-PR2 into virtual-metadata-update 2018-11-21 15:47:08 +01:00
Raf Ponsaerts
38250f94ad Fixed checkstyle 2018-11-21 11:37:14 +01:00
Raf Ponsaerts
34a2b92754 Merged entities-PR2 into w2p-57712_merging-csv-functionality 2018-11-21 11:02:28 +01:00
Raf Ponsaerts
b905f69d63 Refactored the usages of MetadataSchema.DC to use the enum instead 2018-11-21 09:35:11 +01:00
Mark H. Wood
aeee597101 [DS-4077] Remove traces of tm-extractors. 2018-11-20 19:55:02 -05:00
Mark H. Wood
5114f21da8 [DS-4077] Remove old, failing WordFilter; use PoiWordFilter instead. 2018-11-20 18:58:41 -05:00
Tim Donohue
52f1cf1fdc Merge pull request #2246 from atmire/entities-PR2
Initial entities PR
2018-11-20 16:05:34 -06:00
Raf Ponsaerts
b8d6fa2fba [Task 57442] Added the MetadataSchemaEnum with DC and RELATION, rewrote methods using relation String 2018-11-20 15:51:41 +01:00
Raf Ponsaerts
cfeabe349c [Task 57188] added the logic for the advanced place column calculation. Enforced that all virtual metadata are valid metadatafields 2018-11-20 14:32:12 +01:00
Raf Ponsaerts
0ba6d75a30 Constructed a RelationshipMetadataValue object and refactored the getRelationshipMetadata methods in the ItemServiceImpl to use this object instead of it's parent MetadataValue 2018-11-19 16:14:51 +01:00
benbosman
ad7bd23038 Additional search filter 2018-11-16 17:39:36 +01:00
Raf Ponsaerts
3a972ba5b9 Added config for virtual metadata fields on the journalIssue and Publication item pages 2018-11-16 15:17:23 +01:00
Raf Ponsaerts
a755f296bf Added relation.isJournalOfPublication metadata field on the publication with config and addition of a new uuidvalue bean 2018-11-16 11:14:28 +01:00
Raf Ponsaerts
255de21271 [Task 57187] added the relationship ID after the virtual:: in the metadata field authority 2018-11-16 10:10:02 +01:00
Tim Donohue
76470f4eec Merge pull request #2262 from tdonohue/DS-4078
DS-4078: Bitstreams should keep their formats when being versioned. (Master port)
2018-11-15 11:35:55 -06:00
Tim Donohue
f3245e7c88 DS-4078: Bitstreams should keep their formats when being versioned. 2018-11-15 16:28:14 +00:00
Tim Donohue
5eeb40e530 Merge pull request #2258 from atmire/w2p-57063_http-vs-https-request-url
Make the REST URL configurable instead of relying on the request.getUrl
2018-11-15 10:20:48 -06:00
Raf Ponsaerts
8e8bb65f75 Removed a problematic context.close() 2018-11-15 15:13:16 +01:00
Raf Ponsaerts
01c1c0cddf [Task 57265] added the request search filters 2018-11-15 12:50:01 +01:00
Raf Ponsaerts
7b2233a9b7 [Task 57186] added useForPlace property to the beans and added setters and getters to the interface. defaulting to false. Added this property to the isAuthorOfPublication config 2018-11-15 10:45:45 +01:00
Raf Ponsaerts
86cd0c7025 [Task 57103] added javadoc 2018-11-15 09:27:56 +01:00
Raf Ponsaerts
353d4c9d60 [Task 57103] implemented the support for related beans and added the option for config to supply virtual metadata throughout many nested relations 2018-11-14 16:06:33 +01:00
Pablo Prieto
7d5e12a793 Added QueryString null handling. 2018-11-13 18:40:28 -06:00
Raf Ponsaerts
37ffda5d21 Made sure that the delete and create functions in relationshipservice update the place attributes accordingly. Fixed checkstyle and added license header. Changed cacheable functions to false in relationshipDAO 2018-11-12 15:56:28 +01:00
Raf Ponsaerts
5de574d65a Merged w2p-57104_csv-import-delete-functionality into w2p-57107_mixing-entities-and-plaintest-values 2018-11-12 13:14:40 +01:00
Raf Ponsaerts
b226666113 Added loops to set places on left and right places for relationships on create and delete 2018-11-12 13:11:43 +01:00
Raf Ponsaerts
b3a4a196d8 [Task 57107] changed the functionality of the VirtualMetadataPopulator to now support a bean 2018-11-12 12:43:10 +01:00
Ben Bosman
45afa91d0e Removed setter since it's no longer used 2018-11-12 09:32:40 +01:00
Ben Bosman
c30e548533 use only one extraction of the UUID 2018-11-12 09:26:18 +01:00
Tim Donohue
94aeb7c18a Merge pull request #2241 from mwoodiupui/DS-3135-again
[DS-3135] Replace log4j 1.2, which is EOL
2018-11-09 10:34:10 -06:00
Oliver Goldschmidt
e53af97a31 fixes checkstyle issue 2018-11-09 16:39:17 +01:00
Mark H. Wood
9e1c16990f [DS-3135] Remove leftover comment (and surrounding empty niladic constructor). 2018-11-09 09:02:59 -05:00
Mark H. Wood
18868370de [DS-3135] Remove commented-out attempt by logged code to tell logging framework its configuration path. 2018-11-09 09:02:59 -05:00
Mark H. Wood
468725346f Remove system property pointing to logging config. Instead just let
framework do its default search, which should find the config. on the
classpath.
2018-11-09 09:02:59 -05:00
Mark H. Wood
3929a5d7ac [DS-3135] Remove questionable loglevel fiddling; conform to webapp descriptor schema. 2018-11-09 09:02:59 -05:00
Mark H. Wood
e671680ae4 [DS-3135] Let Spring configure Log4J; correct logging config in Ant script. 2018-11-09 09:02:59 -05:00
Mark H. Wood
eff018aace [DS-3135] Squash another unused import for Checkstyle. 2018-11-09 09:02:59 -05:00
Mark H. Wood
38e1d0a8d4 [DS-3135] Set the log4jConfiguration context parameter properly. 2018-11-09 09:02:59 -05:00
Mark H. Wood
5b66fdf5db [DS-3135] Clean out temporary exclusions; tidy indentation. 2018-11-09 09:02:59 -05:00
Mark H. Wood
2925a3a3bf [DS-3135] Finish converting log4j2 configuration; make it work for Solr; tweak Ant usage. 2018-11-09 09:02:59 -05:00
Mark H. Wood
b668860a2b [DS-3135] Don't configure Solr with ConfigureLog4jListener which no longer exists. 2018-11-09 09:02:59 -05:00
Mark H. Wood
9133d289ed [DS-3135] Satisfy Checkstyle by removing unused includes; tighten up the code a little. 2018-11-09 09:02:59 -05:00
Mark H. Wood
959b5122b7 [DS-3135] Squash one more log4j1 dependency. 2018-11-09 09:02:59 -05:00
Mark H. Wood
f3505ed22b [DS-3135] Still more dependency cleanup. 2018-11-09 09:02:59 -05:00
Mark H. Wood
3f904dc4ec [DS-3135] Tell Log4J where to find its configuration. 2018-11-09 09:02:59 -05:00
Mark H. Wood
5cbe4fefb5 [DS-3135] Correct Log4J configuration file name. 2018-11-09 09:02:58 -05:00
Mark H. Wood
bb10bfe01a [DS-3135] More dependency cleanup 2018-11-09 09:02:58 -05:00
Mark H. Wood
9be54714d3 [DS-3135] More dependency fixes. 2018-11-09 09:02:58 -05:00
Mark H. Wood
f5d325b36b [DS-3135] Fix new class that uses old log4j. 2018-11-09 09:02:58 -05:00
Mark H. Wood
f48d005e2c [DS-3135] log4j 2 needs new configuration -- start with the command line tools 2018-11-09 09:02:58 -05:00
Patrick Trottier
538b0046e5 Change to version 2.6.2 of log4j
To ensure compatibility with Spring Boot v1.4.4
2018-11-09 09:02:58 -05:00
Patrick Trottier
8eb074b11f Fix dependency convergence 2018-11-09 09:02:58 -05:00
Patrick Trottier
a3db6f727c Fix import statement 2018-11-09 09:02:58 -05:00
Patrick Trottier
7996283d3c Fix files not conforming to code style 2018-11-09 09:02:58 -05:00
Patrick Trottier
56dd50d283 Additional refactoring for log4jv2 2018-11-09 09:02:58 -05:00
Patrick Trottier
7bd47362b6 Refactor additional loggers for log4jv2 2018-11-09 09:02:58 -05:00
Patrick Trottier
bb2dde22b1 Convert getLogger() and Log4J imports for Log4J 2 2018-11-09 09:01:25 -05:00
Patrick Trottier
a49486310a Update POMs to version 2.10.0 2018-11-09 08:58:56 -05:00
Patrick Trottier
a5a00816cc Additional refactoring for log4jv2 2018-11-09 08:58:56 -05:00
Patrick Trottier
00088c9739 Fix dependency convergence and use POM property for log4j version 2018-11-09 08:56:44 -05:00
Patrick Trottier
3b10caef0d Fix reference to log4jv2 2018-11-09 08:56:44 -05:00
Patrick Trottier
bda4fa0ba7 Refactor additional loggers for log4jv2 2018-11-09 08:56:44 -05:00
Patrick Trottier
35b5c6c321 Remove bean declaration for log4v1 service 2018-11-09 08:53:12 -05:00
Patrick Trottier
7f589a5ad6 Refactor import statements 2018-11-09 08:53:12 -05:00
Patrick Trottier
1eed2698ae Remove trailing whitespace in EventListenerExample 2018-11-09 08:51:47 -05:00
Patrick Trottier
b72fff4a25 Remove code related to log4j1 2018-11-09 08:51:47 -05:00
Patrick Trottier
1e3231967f Add Log4J LogManager to EventListenerExample 2018-11-09 08:41:42 -05:00
Patrick Trottier
7f4b4e9f8a Convert getLogger() and Log4J imports for Log4J 2 2018-11-09 08:41:42 -05:00
Patrick Trottier
fb4e7ae9cb Update POMs to version 2.10.0 2018-11-08 16:03:30 -05:00
Tim Donohue
195213d528 Merge pull request #2244 from tdonohue/commons_config_v2
DS-4056: Upgrade to Commons Configuration v2 (and Commons Lang v3)
2018-11-08 15:00:03 -06:00
Tim Donohue
34edf302b7 Remove unnecessary PropertyPlaceholderConfigurer 2018-11-08 20:35:58 +00:00
Tim Donohue
4d882bec60 Remove unnecessary exclusion 2018-11-08 20:35:58 +00:00
Tim Donohue
200cede397 Upgrade to commons-lang3 to avoid dependency conv issues. 2018-11-08 20:35:57 +00:00
Tim Donohue
b4eac9e5ff Update config-definition.xml to be v2 compatible 2018-11-08 20:35:57 +00:00
Tim Donohue
3ec449f3df Update dspace-api code to Commons Config v2 2018-11-08 20:35:54 +00:00
Tim Donohue
79d027776e Add Spring PropertySource integration + tests to prove it works 2018-11-08 20:33:28 +00:00
Tim Donohue
673d4b9d36 Replace DSpaceConfigurationFactoryBean with ConfigurationPropertiesFactoryBean 2018-11-08 20:33:28 +00:00
Tim Donohue
c810efccb3 Add / enhance unit tests to prove functionality unchanged after upgrade 2018-11-08 20:33:28 +00:00
Tim Donohue
cb913fd24f Initial upgrade of dspace-services to Commons Config v2 2018-11-08 20:33:28 +00:00
Tim Donohue
d364f351e6 Merge pull request #2259 from Georgetown-University-Libraries/ds4075
[DS-4075] correct command line usage for solr-upgrade-statistics-6x
2018-11-08 13:52:57 -06:00
Raf Ponsaerts
b37d0e2167 Applied community feedback 2018-11-08 14:28:58 +01:00
Terry Brady
68dca061b0 correct usage 2018-11-07 15:20:36 -08:00
Tim Donohue
af305d154a Merge pull request #1810 from Georgetown-University-Libraries/ds3602m
[DS-3602] Incremental Update of Legacy Id fields in Solr Statistics
2018-11-07 16:07:41 -06:00
Raf Ponsaerts
445e42ccc2 Merge branch 'w2p-57104_csv-import-error' into w2p-57104_csv-import-delete-functionality 2018-11-06 15:30:20 +01:00
Raf Ponsaerts
bfd0bd7362 [Task 57104] adding delete functionality for the csv import 2018-11-06 15:30:13 +01:00
Raf Ponsaerts
d8ced6aa7d Made sure that the relation.X name is always added to the item for every relationship that it has 2018-11-06 11:34:50 +01:00
Raf Ponsaerts
d90f4e63f5 [Task 56183] added the additional relationshiptype endpoint on the entitytype 2018-11-05 14:02:44 +01:00
Raf Ponsaerts
b81593c41c [Task 57104] added support for multiple relation import 2018-11-05 11:43:46 +01:00
Raf Ponsaerts
75358b2bdd Reverted the changes to the DSpaceCSVTest, bulkedit.cfg and thumbnail size in dspace.cfg 2018-11-05 10:51:08 +01:00
Raf Ponsaerts
0ee618385b [Task 56182] Added the endpoint to search across an items relationships by a label of the relationshiptype 2018-11-05 09:25:56 +01:00
Oliver Goldschmidt
53e548e4a9 remove OpenSearch path from config as its not configurable any more 2018-11-02 16:09:31 +01:00
Terry Brady
91f779aa8c handle shard corrupt multivals 2018-11-01 17:34:30 -07:00
Tim Donohue
218fdcbaf0 Merge pull request #2207 from tomdesair/Authentication_X-Forwarded-For
DS-4074: Only use X-Forwarded-For value from known proxies
2018-11-01 14:55:51 -05:00
Terry Brady
defa4671d8 more checkstyle fix 2018-11-01 10:39:40 -07:00
Bill Tantzen
8ec4780625 DS-4050 Search method returns 204 NoContent or 200 with an empty page if no result. 2018-11-01 12:31:16 -05:00
Terry Brady
a6276da8b0 checkstyle fixes 2018-11-01 10:13:20 -07:00
Terry Brady
ed94b9ce9e Update dspace/config/launcher.xml
upgrade launcher labels
2018-11-01 09:04:18 -07:00
Terry Brady
fa00fec49f Merge pull request #14 from Georgetown-University-Libraries/ds3602m1
Add string "-unmigrated" to ids that cannot be mapped
2018-10-31 16:20:22 -07:00
Terry Brady
fed22f30bf add unmigrated 2018-10-31 16:10:53 -07:00
Terry Brady
30c522f896 Merge pull request #13 from Georgetown-University-Libraries/ds3602m1
Remove DSpace Mig from Query, clarify command name
2018-10-31 15:50:19 -07:00
Terry Brady
b9b383bb05 rem comment 2018-10-31 15:47:14 -07:00
Terry Brady
3255206212 simplify not queries 2018-10-31 15:09:17 -07:00
Terry Brady
6c989433e1 try new query 2018-10-31 15:03:51 -07:00
Tim Donohue
74149ba247 Fix checkstyle failure (after rebase) 2018-10-31 15:29:34 -05:00
Terry Brady
9d5b59b46f Merge branch 'master' into ds3602m 2018-10-31 13:05:39 -07:00
Tim Donohue
9fee00445e Merge branch 'master' into Authentication_X-Forwarded-For 2018-10-31 14:55:53 -05:00
Tim Donohue
1f15bb874e Merge pull request #2206 from atmire/DS-4010_query-parameter-on-discover-endpoint-bug
[DS-4010] Removed the escaping on the query parameter for the discove…
2018-10-31 14:13:45 -05:00
Raf Ponsaerts
9e99c911c6 Added information into the exception message 2018-10-31 15:28:33 +01:00
Raf Ponsaerts
b5c3af6aa5 CSV Import fixes 2018-10-31 13:41:36 +01:00
Raf Ponsaerts
3c10296d0c [Task 57063] made the REST URL configurable instead of relying on the request.getUrl method to return the correct information 2018-10-31 10:17:48 +01:00
Tom Desair
b622781f5d DS-3542: Added unit tests for ClientInfoServiceImpl 2018-10-29 23:02:34 +01:00
Tim Donohue
f984cd17c1 Merge pull request #2249 from tdonohue/statistics_schema_uuid
Fix DS-4066 by update all IDs to string type in Solr Statistics schema
2018-10-29 16:31:29 -05:00
Tim Donohue
b283d5d9e4 Fix DS-4066 by update all IDs to string type in schema 2018-10-29 17:07:54 +00:00
Oliver Goldschmidt
0a9f5efcdc adds some tests 2018-10-29 13:53:51 +01:00
Oliver Goldschmidt
c08fb4af1d removes wrong line from comment 2018-10-29 13:53:20 +01:00
Oliver Goldschmidt
110f6c2a8d ignore test instead of having it commented out 2018-10-29 09:31:33 +01:00
Mark H. Wood
003831f4f6 Merge pull request #2247 from mwoodiupui/DS-3433
[DS-3433] Don't scare the admin. when an email Session is not found in JNDI
2018-10-26 12:08:07 -04:00
Raf Ponsaerts
eb0ea9a6bf Added relevant JavaDocs to public methods in altered classes 2018-10-26 08:48:33 +02:00
Tim Donohue
0c6c9afbd4 Merge pull request #2235 from tdonohue/dependency-cleanup
DS-4041: Update to Servlet Spec version 3.1.0 in all dependencies
2018-10-25 15:02:51 -05:00
Mark H. Wood
9c242087a7 [DS-3433] Silence exceptions when named Session not found or there is no JNDI initial context. 2018-10-25 15:34:21 -04:00
Tim Donohue
d5e22ec1af Merge pull request #2167 from mwoodiupui/DS-3970
[DS-3970] Unused, nonexistent configuration property referenced in confusing startup message
2018-10-25 12:18:58 -05:00
Raf Ponsaerts
af94859404 Added JavaDoc to public methods in new classes where necessary 2018-10-25 14:55:21 +02:00
Raf Ponsaerts
031642ddc1 Made relation.* searchable again 2018-10-24 15:24:03 +02:00
Raf Ponsaerts
b0216cb918 Test fixes and checkstyle fixes 2018-10-24 14:12:23 +02:00
Raf Ponsaerts
f223f1c067 DSpace 7 Entities 2018-10-24 10:24:09 +02:00
Tim Donohue
bd9bd233b7 Merge pull request #2239 from 4Science/inputform-rows
DS-3937 Add support for styling and rows in the submission-forms.xml
2018-10-23 11:49:33 -05:00
Tim Donohue
cb1a35a524 Merge pull request #2243 from mwoodiupui/DS-4031-7x
[DS-4031] Updated link to DRIVER guidelines -- 7_x.
2018-10-18 11:43:59 -05:00
Mark H. Wood
992c775058 [DS-4031] Updated link to DRIVER guidelines. 2018-10-18 09:41:13 -04:00
Andrea Bollini
e92852faca Merge pull request #2215 from mspalti/repositorypatch
DS-4021: Updated patch operations for Item repository and added patch endpoints for EPerson.
2018-10-18 11:28:31 +02:00
Pablo Prieto
819a42b69e Fixed Integration Tests 2018-10-17 23:32:39 -05:00
Terry Brady
826babb48e Merge pull request #2214 from terrywbrady/ds4012
[DS-4012] Support Multiple Dockerfiles in one branch
2018-10-17 11:29:57 -07:00
Andrea Bollini
d55189a4f2 Add javadoc to the matcher class 2018-10-17 16:58:04 +02:00
Tim Donohue
499f496d62 Merge branch 'dependency-cleanup' of github.com:tdonohue/DSpace into dependency-cleanup 2018-10-17 09:54:56 -05:00
Tim Donohue
6dc6102325 Update all web.xml files to use Servlet 3.1. Requires minor reorg in older webapps. 2018-10-17 09:53:46 -05:00
Andrea Bollini
8b0db601b0 Improved javadoc about new submission conf concepts 2018-10-17 16:50:41 +02:00
Michael W Spalti
f9861ec5ca [DS-4021] Corrected minor typo in integration test. 2018-10-15 13:22:43 -07:00
Michael W Spalti
f890f1705c [DS-4021] Updated integration tests.
Merge branch 'repositorypatch' of https://github.com/mspalti/DSpace into repositorypatch
2018-10-15 12:43:11 -07:00
Oliver Goldschmidt
b702582fb2 removes unnecessary import 2018-10-15 17:04:27 +02:00
Oliver Goldschmidt
fa3af00e4d removes unnecessary init method 2018-10-12 16:55:00 +02:00
Tim Donohue
0d59219ead Merge pull request #2232 from antzsch/ds4032-create-eperson-group
Add the ability to create EPerson Group via REST.
2018-10-12 09:47:59 -05:00
Oliver Goldschmidt
d421942321 fixes style issue 2018-10-12 10:10:13 +02:00
Oliver Goldschmidt
c7cde57c3a removing unnecessary settings in test configuration 2018-10-12 09:38:57 +02:00
Oliver Goldschmidt
0c6690aa82 cleaning up 2018-10-12 09:36:15 +02:00
Andrea Bollini
5b43bebf17 Merge pull request #2238 from tantz001/DS-3916
/api/config/submissiondefinitions/traditional/collections should return empty list rather than 204 [was#2189]
2018-10-12 00:34:05 +02:00
Michael W Spalti
837814b596 [DS-4021] Added a bit of IOC that helps to assure consistent checks before applying patch replace operations. 2018-10-11 14:54:29 -07:00
Andrea Bollini
3c0c4565df Merge pull request #2228 from ppmdo/DS-4024
Create top level community via REST [DS-4024]
2018-10-11 23:22:55 +02:00
Bill Tantzen
e2608f79bf return null/204 when endpoints return null (or empty list) and a single item is requested; return empty list/200 when endpoints return null (or empty list) and an array of items is requested. 2018-10-11 11:05:35 -05:00
Tim Donohue
d6d7a620c2 Merge pull request #2187 from 4Science/DS-3580_workspaceitem
DS-3850 Create new submissions from files or identifiers
2018-10-11 08:59:27 -05:00
Bill Tantzen
e64101a5e9 modified two additional ITs reflecting the changes to RestResourceController 2018-10-11 07:41:56 -05:00
Oliver Goldschmidt
ac788aacee fixes style issues in tests 2018-10-11 09:57:54 +02:00
Tim Donohue
4652bb8cab Dependency fixes for OAI-PMH using Servlet API 3.1 2018-10-10 18:32:18 +00:00
Michael W Spalti
ed8f245e7d [DS-4021] Replace patch operations on a non-existent value now result in a PatchBadRequestException.
Added missing license headers.

Added FIXME.
2018-10-10 10:38:01 -07:00
Andrea Bollini
d794c20385 DS-3937 remove unused methods, improve javadoc and testing 2018-10-10 19:29:40 +02:00
Bill Tantzen
77f4b9db6e minor format changes 2018-10-10 11:18:03 -05:00
Bill Tantzen
2f1f45f8a8 /api/config/submissiondefinitions/traditional/collections should return empty list rather than 204 #2189 2018-10-10 11:09:28 -05:00
Terry Brady
4c5b82c95a review comments 2018-10-10 08:28:02 -07:00
Oliver Goldschmidt
50490370d9 introduces tests for OpenSearchController 2018-10-10 16:38:43 +02:00
Andrea Bollini
e58870eb8f Merge branch 'DS-3580_workspaceitem' of https://github.com/4Science/DSpace into inputform-rows 2018-10-10 11:45:02 +02:00
Andrea Bollini
b6a35ebaf7 DS-3580 simplify the interface retrieving the filename from the multipartfile upload
Add additional Javadoc
Better align the interface with the Angular implementation
2018-10-10 11:35:36 +02:00
Michael W Spalti
7e4194b3a4 [DS-4021] Replace patch operations on a non-existent value now result in a PatchBadRequestException.
Added missing license headers.
2018-10-09 15:18:23 -07:00
Michael W Spalti
a9bf1d869d [DS-4021] Patch processing now operates on the rest model and commits changes after successful completion of all patch operations.
Also added new integration tests.

Minor changes to error handling and comments.

[DS-4021] Updated tests and javadocs.

Also updated the string comparison in EPersonRestRespository that triggers the update of the netid after a patch operation is complete.
2018-10-09 11:04:34 -07:00
Stefan
0f0066b0c0 Spilt the createAndRunTest() method in several methods in dependcy of the context what they test and code cleanup. 2018-10-09 19:04:58 +02:00
Andrea Bollini
dc51e4c059 Merge branch 'DS-3580_workspaceitem' of https://github.com/4Science/DSpace into inputform-rows 2018-10-09 19:02:27 +02:00
Pablo Prieto
a2e7069d1c Fixed missing @Test on ITs
Modified CommunityMatcher to use the generic CommunityMetadataMatcher
2018-10-08 23:19:53 -05:00
Pablo Prieto
486b1fb41d Added POST method on Communities endpoint to create a Top-Level Community
Wrote integration tests for admin, anonymous and normal eperson use cases
2018-10-08 23:09:04 -05:00
Andrea Bollini
e4874ae356 Merge branch 'master' of https://github.com/DSpace/DSpace into workspace-IT 2018-10-08 23:20:38 +02:00
Andrea Bollini
2458b5d621 DS-3850 ITs for metadata update, file upload, file update 2018-10-08 22:30:22 +02:00
Andrea Bollini
89ebd2c154 Make LicenseOperation more client friendly 2018-10-08 22:29:31 +02:00
Tim Donohue
3fd7cc903d Fix dependency convergence errors with OAI and SWORDv2 2018-10-08 20:07:44 +00:00
Tim Donohue
d3ee089d2e Update our DummyHttpServletRequest to Servlet Spec 3.1.0 2018-10-08 12:40:07 -05:00
Tim Donohue
447bea2921 Update to using Servlet API spec 3.1.0 in all modules 2018-10-08 17:11:37 +00:00
Tim Donohue
754915b8aa Remove duplicate dependency in dspace-api 2018-10-08 17:01:55 +00:00
Andrea Bollini
4a0b636b3d DS-3850 add ITs for create, bulkcreate, metadata patch 2018-10-07 22:23:40 +02:00
Andrea Bollini
2d2afd886c Set the status when the errors information are available 2018-10-07 22:21:17 +02:00
Andrea Bollini
951d039830 Fix bulk creation via file upload 2018-10-07 22:20:45 +02:00
Andrea Bollini
1bb1fd44a4 Fix javadoc misunderstanding 2018-10-06 21:02:53 +02:00
Andrea Bollini
a8863fd6a0 Add missing method to support bulk creation of objects via file upload 2018-10-06 13:08:58 +02:00
Andrea Bollini
9e85421ab4 Add javadocs 2018-10-06 13:06:47 +02:00
Andrea Bollini
c466d10c6a DS-3850 Integration Test for workspaceitem endpoint (find and delete methods)
Add predictable order to the workflowitem find query (by workspaceItemId instead than item uuid)
2018-10-05 22:36:50 +02:00
Stefan
79b950c8e4 Restrict the endpoint with an @PreAuthorize annotation 2018-10-05 18:28:29 +02:00
Stefan
f432568b31 Cleaning up the code and implement more test cases for the group creation. 2018-10-05 15:29:16 +02:00
Stefan
cc7bcf6007 Add the ability to create EPerson Group via REST. 2018-10-05 12:30:19 +02:00
Pablo Prieto
40ca9f6150 Added URL parameter parsing for Browse Endpoint 2018-10-05 00:21:49 -05:00
Pablo Prieto
267677a023 Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3904 2018-10-03 21:01:34 -05:00
Michael W Spalti
ef8b0c642d [DS-4021] This commit includes code formatting and comment changes.
Also a few minor modifications to patch operations.

Minor code format changes.

Final code formatting changes for EPersonRestRepositoryIT.
2018-10-02 11:02:51 -07:00
Tim Donohue
a3437d4217 Merge pull request #2188 from atmire/DS-3991_HAL-browser-download
[DS-3991] made sure that the halbrowser now correctly downloads files
2018-10-02 10:04:57 -05:00
Michael W Spalti
1f091bd6bd DS-4021 Reintroduce abstract and implementation classes for patch operations.
Prior to this commit, the factory implementations used only path and value, ignoring the operation name. This commit reintroduces default implementations operations (add, move, remove, replace) and working implementations for eperson and item replace operations. The factory methods are now designed to be used specifically for replace operations. As needed, new factory methods can be added.
2018-10-01 23:16:05 -07:00
Raf Ponsaerts
aacb7b0ee3 [DS-3991] changed the default name of a downloaded file to content in the client.js 2018-10-02 08:09:02 +02:00
Michael W Spalti
ca697e8064 [DS-4021] Added patch method for EPerson.
Implements patch operations for password, canLogin, requireCertificate and netid. Includes integration tests.
2018-10-01 14:04:59 -07:00
Oliver Goldschmidt
f9bfd5ed6d Merge branch 'master' of https://github.com/DSpace/DSpace into dspace7_opensearch 2018-10-01 15:37:29 +02:00
Raf Ponsaerts
7e1ca49bcf [DS-4010] fixed test case 2018-10-01 15:15:31 +02:00
Raf Ponsaerts
06b4e5d12a [DS-4010] added IT tests for the query functionality on the discover endpoint 2018-10-01 13:59:29 +02:00
Michael W Spalti
d51493a1a3 Rolling back indentation changes in ItemRestRepositoryIT.
Reverting indentation changes in ItemRestRepositoryIT.

More updates to indentation.

Corrected line length errors in ItemRestRepositoryIT.

Final formatting updates for ItemRestRepositoryIT.
2018-09-27 18:40:59 -07:00
Michael W Spalti
65c7bbb6f6 Updated respository patch classes and ItemRestRepository. 2018-09-27 13:01:23 -07:00
Terry Brady
0b991a4d13 Create default and test image variants 2018-09-27 07:15:14 -07:00
Terry Brady
e0d10d7eec support mult dockerfiles per branch 2018-09-26 14:34:53 -07:00
Tom Desair
bd464e03f5 DS-3542: Only trust X-Forwared-For headers from trusted proxies 2018-09-26 22:13:03 +02:00
Raf Ponsaerts
47dc9482c6 [DS-4010] added IT test for the discoverQuery with a query parameter containing dc.date.issued 2018-09-25 14:14:51 +02:00
Raf Ponsaerts
df5816dff9 Revert "Revert "[DS-4010] added a catch for the search error and throw illegalarguement instead so that the exception handling picks it up easily and returns a proper error response with headers included""
This reverts commit ea42765849.
2018-09-21 15:25:24 +02:00
Raf Ponsaerts
ea42765849 Revert "[DS-4010] added a catch for the search error and throw illegalarguement instead so that the exception handling picks it up easily and returns a proper error response with headers included"
This reverts commit b192552888.
2018-09-21 14:38:55 +02:00
Raf Ponsaerts
b192552888 [DS-4010] added a catch for the search error and throw illegalarguement instead so that the exception handling picks it up easily and returns a proper error response with headers included 2018-09-21 14:07:01 +02:00
Raf Ponsaerts
aa4f3bb60b [Task 55655] set the status to Bad Request if the query entered wasn't valid and the searchservice threw an exception 2018-09-21 13:20:02 +02:00
Raf Ponsaerts
26a55312b3 [DS-4010] Fixed the tests 2018-09-18 08:49:38 +02:00
Raf Ponsaerts
a73b980955 [DS-4010] removed the comments that don't represent the code anymore 2018-09-17 16:50:01 +02:00
Tim Donohue
b0e076ddb1 Merge pull request #2205 from AlexanderS/DS-3664-master
[DS-3664] ImageMagick: Only execute "identify" on first page
2018-09-17 09:40:29 -05:00
Raf Ponsaerts
b3abd7743c [DS-4010] Removed the escaping on the query parameter for the discover endpoint 2018-09-17 15:58:11 +02:00
Alexander Sulfrian
4f53075317 ImageMagick: Only execute "identify" on first page
The Info object used to get the color format runs "identify" on the supplied
input file. If the file has many pages, this process might require some time.
"identify" supports the same syntax for the input file like the other
ImageMagick tools and we can simply restrict the pages by changing the input
file name.

This fixes DS-3664.
2018-09-17 15:10:33 +02:00
Andrea Bollini
7aed10c1b1 DS-3937 Add support for styling and rows in the submission-forms.xml 2018-09-11 20:45:08 +02:00
Mark H. Wood
0340cd73ad [DS-3990] Add simple unit test. 2018-09-09 17:19:39 -04:00
Mark H. Wood
43d01f283f [DS-3990] Dummy task to test properties and parameters. 2018-09-09 12:40:49 -04:00
Tim Donohue
705e0fabbd Merge pull request #2111 from atmire/DS-3741-hibernate-deprecation-fix-applied-feedback
DS-3741 hibernate deprecation fix applied feedback
2018-09-06 14:28:40 -05:00
Terry Brady
d4dbd3894a fix exist/doesn't exist 2018-09-06 11:23:56 -07:00
Raf Ponsaerts
0c58803a44 [DS-3741] restored the findByMetadataQuery method in a slightly altered version 2018-09-06 14:46:22 +02:00
Mark H. Wood
2c47e9b149 [DS-3990] Satisfy Checkstyle (tested this time)-: 2018-09-06 08:44:06 -04:00
Mark H. Wood
af27fde95b [DS-3990] Satisfy Checkstyle. 2018-09-06 08:42:13 -04:00
Mark H. Wood
e980811d4e [DS-3990] CLI access to task run parameters. 2018-09-06 07:01:46 -04:00
Raf Ponsaerts
349eebb834 [DS-3991] removed the xmlui content disposition threshold property 2018-08-30 07:55:59 +02:00
Raf Ponsaerts
287dce21fc [DS-3991] altered the disposition rules in the BitstreamContentRestController and MultiPartFileSender 2018-08-30 07:55:59 +02:00
Raf Ponsaerts
c56c1d38ef [DS-3991] made sure that the halbrowser now correctly downloads files through the bitstreamcontent repository 2018-08-30 07:55:10 +02:00
Mark H. Wood
173b7f4fce [DS-3990] Invent curation run parameters. 2018-08-28 10:46:00 -04:00
Luigi Andrea Pascarelli
25e3a69b8f DS-3851 Configurable Workflow endpoints 2018-08-27 20:20:24 +02:00
Luigi Andrea Pascarelli
c0066d2a2f DS-3850 Create new submissions from files or identifiers 2018-08-26 10:42:14 +02:00
Tim Donohue
66b89a786d Merge pull request #1976 from atmire/DS-3542_Spring-permission-evaluator
DS-3542 Spring security authorizations 2
2018-08-23 14:26:30 -05:00
Tim Donohue
d43aba5131 Merge pull request #2179 from 4Science/DS-3905_2
DS-3905 Implement a generic uuid lookup endpoint (DSpaceObjectService based)
2018-08-23 12:29:45 -05:00
Raf Ponsaerts
928ae19d1d Applied the feedback 2018-08-23 14:41:54 +02:00
Raf Ponsaerts
02fb1f4077 [DS-3741] removed unecessary import in comments and removed unused parameter of method 2018-08-23 14:41:54 +02:00
Raf Ponsaerts
154177db49 [DS-3741] fixed limit-offset switch in method call 2018-08-23 14:41:54 +02:00
Raf Ponsaerts
d0889f858c [DS-3741] fixed checkstyle 2018-08-23 14:41:54 +02:00
Raf Ponsaerts
ae53acfcb7 [DS-3741] Fixed findByMetadataQuery in ItemDAOImpl 2018-08-23 14:41:54 +02:00
Raf Ponsaerts
07a76d394b [DS-3741] added documentation to the AbstractHibernateDAO class 2018-08-23 14:41:53 +02:00
Raf Ponsaerts
b95a55d3d0 Applied the community feedback regarding the PR 2018-08-23 14:41:53 +02:00
Raf Ponsaerts
a12c22a767 Removed a bunch of unnecessary code in comments 2018-08-23 14:41:53 +02:00
Raf Ponsaerts
a33f5de0df [DS 3741] fixed checkstyle in DAOImpl classes 2018-08-23 14:41:53 +02:00
Tom Desair
db0a898c42 DS-3741: Added dspace-api/target/generated-sources/annotations to Coveralls source directories 2018-08-23 14:41:12 +02:00
Raf Ponsaerts
0491247c6d [DS-3741] fixes after cherry-picking 2018-08-23 14:41:12 +02:00
Raf Ponsaerts
1745640bbc [DS-3741] fixed wrong and/or usage and wrote the last query" 2018-08-23 14:40:32 +02:00
Raf Ponsaerts
877b84fda6 [DS-3741] fixed the two resultTransformer queries 2018-08-23 14:40:09 +02:00
Raf Ponsaerts
6f5b9cddc6 [DS-3741] cleaned up code in comments 2018-08-23 14:40:08 +02:00
Raf Ponsaerts
7385ce5b04 [DS-3741] All DSpace test cases succeed, 3 DAO functions left unwritten 2018-08-23 14:39:24 +02:00
Tom Desair
e69ebcfe82 DS-3741: Changed setTimestamp to setParameter + added subquery example 2018-08-23 14:38:58 +02:00
Raf Ponsaerts
e16857ce9e [DS-3741] fixes after cherry-pick 2018-08-23 14:38:58 +02:00
Andrea Bollini
ccb81d5091 Use the RestModel getTypePlural convenient method 2018-08-22 22:23:33 +02:00
Andrea Bollini
7c691d8086 Add addition test to cover the residual DSpaceObject types 2018-08-22 21:26:18 +02:00
Andrea Bollini
23f4ebc6c2 Move to a DSpaceService lookup strategy (DBMS) 2018-08-22 21:25:55 +02:00
Tom Desair
cad1b5a10e DS-3542: Fixes after rebase 2018-08-22 15:49:33 +02:00
Tom Desair
798264c001 DS-3542: Restore DSpaceRestRepository methods with authorizations checking 2018-08-22 14:34:44 +02:00
Tom Desair
5d257f4423 DS-3542: Correct DSO Admin level check 2018-08-22 14:34:44 +02:00
Tom Desair
93bac51ead DS-3542: Added custom DSpace AuthenticationEntryPoint in order to return 401 status 2018-08-22 14:34:43 +02:00
Tom Desair
44a0f9b2de DS-3542: Renamed EPERSON Spring security grant to AUTHENTICATED 2018-08-22 14:34:43 +02:00
Tom Desair
0dede9eacb DS-3542: Spring Authorization - @Preauthorization 2018-08-22 14:34:33 +02:00
Yana De Pauw
302c877d21 DS-3542: Spring Authorization - @Preauthorization 2018-08-22 14:32:40 +02:00
frederic
295d6d6ba1 DS-3542: Spring permission evaluator for REST API 2018-08-22 14:03:26 +02:00
Tim Donohue
238ede5f9d Merge pull request #2010 from mohideen/patch-1
DS-3885. Minor fix on CommunityServiceImpl.java
2018-08-21 15:57:25 -05:00
Pablo Prieto
c0f31e889f CheckStyle Fixes 2018-08-17 16:01:49 -05:00
Pablo Prieto
cd94bbd0ee Checkstyle errors fixed
Added ITs for startsWith + Scope
2018-08-17 12:15:42 -05:00
Andrea Bollini
14338cc183 DS-3905 Implement a generic uuid lookup endpoint
Refactor common code from the IdentifierRestController in a generic DSpaceObject converter
Added ITs
Add handling of standard Spring Bind exception to conform with REST return code (422) usage
2018-08-17 12:20:34 +02:00
Pablo Prieto
c02a5aa778 Wrote Integration Tests 2018-08-16 18:55:50 -05:00
Pablo Prieto
de29b6af46 Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3904 2018-08-16 15:17:19 -05:00
Tim Donohue
3f7e2363c7 Merge pull request #2126 from 4Science/DS-3735
DS-3735 DS-3924 DS-3737 Implement CREATE, DELETE and some search methods for the EPersons endpoint
2018-08-16 15:07:25 -05:00
Pablo Prieto
322460827c Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3904 2018-08-16 00:40:29 -05:00
Pablo Prieto
e25a732e6b Implemented startsWith for Items 2018-08-16 00:35:19 -05:00
Pablo Prieto
cbaad2e714 Implemented method for Browser Entries 2018-08-13 16:52:04 -05:00
Mark H. Wood
7bb8a8e9c4 [DS-3980] Conform to coding standard. 2018-08-13 15:23:18 -04:00
Mark H. Wood
81b41a2b78 [DS-3980] Replace absurd MIME types with 'application/octet-stream'. 2018-08-13 14:42:56 -04:00
Mark H. Wood
07c7fee64f [DS-3970] Remove confusing log message. 2018-08-13 12:55:13 -04:00
Andrea Bollini
0361ed77a5 Use better variable name and remove not thrown exception declaration 2018-08-11 10:08:58 +02:00
Tim Donohue
e65f94da0c Merge pull request #2135 from terrywbradyC9/dockerMaster
[DS-3967] 7x - Migrate Dockerfile to DSpace/DSpace
2018-08-10 09:51:50 -05:00
Tim Donohue
cb3b4d20cd Merge pull request #2163 from atmire/DS-3489_Search-REST-endpoint-Angular-alignment-rebased-on-master
DS-3489: search rest endpoint angular alignment rebased on master
2018-08-09 16:09:35 -05:00
Tim Donohue
063219add8 Merge pull request #2108 from ppmdo/DS-3910
DS-3910 Delete method on item endpoint
2018-08-09 11:03:39 -05:00
Raf Ponsaerts
6dbe089861 [DS-3489] fixed tests and travis failure 2018-08-09 10:22:26 +02:00
Raf Ponsaerts
587a807bec Removed facetLimit checks in the facetEntryMatcher since this is configurable and should never be hardcoded in tests 2018-08-09 10:22:26 +02:00
Raf Ponsaerts
a979392150 [DS-3489] editted the facet limit to be better handable for Angular 2018-08-09 10:22:26 +02:00
Raf Ponsaerts
db6c9faf83 Fixed rebase conflits 2018-08-09 10:22:06 +02:00
Pablo Prieto
84c123a547 Changed createTemplateItem to collection.withTemplateItem() to keep code coherency.
Applied changes to ItemRestRepositoryIT to work using withTemplateItem
2018-08-07 18:21:42 -05:00
Pablo Prieto
a25a046ffa Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3910 2018-08-07 17:46:10 -05:00
Tim Donohue
544536ccef Merge pull request #2158 from Georgetown-University-Libraries/ds3795m
[DS-3795] Manage versions of some buggy transitive dependencies. (for master)
2018-08-07 12:48:43 -05:00
Tim Donohue
ac1ea060ef Merge duplicate dependencies up in Parent POM 2018-08-07 16:48:32 +00:00
Tim Donohue
d5b0b8b43a Merge pull request #2159 from tdonohue/ds3993-master
DS-3933 Updated Pubmed endpoints from http:// to https:// (for master)
2018-08-07 11:35:24 -05:00
Hendrik Geßner
dac12e1233 DS-3933 Updated Pubmed endpoints from http:// to https://. 2018-08-07 15:32:29 +00:00
Tim Donohue
500b2648a1 Merge pull request #2156 from Georgetown-University-Libraries/ds3700m
DS-3700: MediaFilterServiceImpl forgot to close an input stream (for master)
2018-08-07 10:25:12 -05:00
Tim Donohue
99aa4e0cfb Fix dependency convergence issues in new REST API. Add in a few missing changes. 2018-08-07 15:18:23 +00:00
Terry Brady
97a1180f6b fix ver override 2018-08-06 15:45:55 -07:00
Terry Brady
4a604488a7 fix migration/copy errs 2018-08-06 15:35:14 -07:00
Terry Brady
cc6bf1a343 Merge branch 'master' into ds3795m 2018-08-06 15:02:21 -07:00
Terry Brady
19172012b4 port pr2082 2018-08-06 15:00:02 -07:00
Tim Donohue
b159aa901a Merge pull request #2157 from Georgetown-University-Libraries/ds3629m
Ds 3629 listing of all groups misses pagination - XMLUI (for master)
2018-08-06 16:44:48 -05:00
Tim Donohue
a85228bd24 Merge pull request #2059 from MW3000/DS-3693-port-to-master-add-plugin-to-index-filenames-and-file-descriptions-for-files-in-ORIGINAL-bundle
[DS-3693] Port pull request #1838 to master
2018-08-06 16:42:35 -05:00
Tim Donohue
90d0bbccee Merge pull request #2144 from Georgetown-University-Libraries/ds3377m
[DS-3377] Solr queries too long (change search GET requests to POST) (for master)
2018-08-06 16:36:45 -05:00
Tim Donohue
2760c95099 Merge pull request #2155 from Georgetown-University-Libraries/ds3768m
DS-3768 Fixes the harvest solr parse error by (for master)
2018-08-06 16:24:44 -05:00
Tim Donohue
643f0fcde7 Merge pull request #2154 from Georgetown-University-Libraries/ds3310m
[DS-3310] Fix authentication problem in SwordV2 implementation (for master)
2018-08-06 16:19:34 -05:00
Tim Donohue
950c97bf6e Merge pull request #2153 from Georgetown-University-Libraries/ds3856m
DS-3856 - foreignkey-constraint community2community_child_comm_id_fkey (for master)
2018-08-06 16:15:09 -05:00
Tim Donohue
c038d76ced Merge pull request #2151 from Georgetown-University-Libraries/ds3822m
[DS-3822] Don't guess XML structure during ingest (for master)
2018-08-06 16:12:09 -05:00
Tim Donohue
ea9efd7fa3 Merge pull request #2152 from Georgetown-University-Libraries/ds3707m
DS-3707, DS-3715: Fixes to item level embargo/privacy in OAI-PMH (for master)
2018-08-06 16:09:45 -05:00
Tim Donohue
cbd5980368 Merge pull request #2150 from Georgetown-University-Libraries/ds3769m
DS-3769 Set the right hibernate property of … (for master)
2018-08-06 15:48:56 -05:00
Tim Donohue
3c4d5042fb Add in a single missing METHOD.POST 2018-08-06 20:46:20 +00:00
Tim Donohue
74f47de1e2 Add in missing javadocs. Revert small, accidental change. 2018-08-06 20:43:41 +00:00
Terry Brady
1b68a02dd7 Merge branch 'master' into DS-3693-port-to-master-add-plugin-to-index-filenames-and-file-descriptions-for-files-in-ORIGINAL-bundle 2018-08-06 13:23:52 -07:00
Terry Brady
79709ef76a port PR1864 2018-08-06 13:13:23 -07:00
Tim Donohue
21b628a792 Merge pull request #2149 from Georgetown-University-Libraries/ds3702m
DS-3702 & DS-3703: Rebuild the old behavior of bitstreams during vers… (for master)
2018-08-06 15:13:07 -05:00
Terry Brady
c4cf5b3c8d Merge pull request #2054 from tdonohue/DS-3447-ORCID-v2-master-port
DS-3447: ORCID v2 integration (port to master from PR#2039)
2018-08-06 13:08:31 -07:00
Tim Donohue
49947808a2 Merge pull request #2148 from Georgetown-University-Libraries/ds3680m
DS-3681: Refactoring of DSpaceAuthorityIndexer (for master)
2018-08-06 15:00:56 -05:00
Tim Donohue
8bf8a9fa69 Merge pull request #2147 from Georgetown-University-Libraries/ds3616m
DS-3616] Fix nested vocabulary search (master)
2018-08-06 14:51:28 -05:00
Tim Donohue
527dd1f64a Merge pull request #2146 from Georgetown-University-Libraries/ds3522m
DS-3522: Ensure Submission Policies are removed in XMLWorkflow (for master)
2018-08-06 14:46:16 -05:00
Tim Donohue
794dbfd550 Merge pull request #2145 from Georgetown-University-Libraries/ds3498m
DS-3498 quick fix. Disable full text snippets in search results & add warning (for master)
2018-08-06 14:43:12 -05:00
Tim Donohue
64493d24ca Merge pull request #2143 from Georgetown-University-Libraries/ds3332m
[DS-3332] Handle resolver is hardcoded in org.dspace.handle.UpdateHandlePrefix (for master)
2018-08-06 14:23:23 -05:00
Tim Donohue
c65755653b Merge pull request #2142 from Georgetown-University-Libraries/ds2675m
DS-2675: Bugfixing: Jump to value general errors with order (for master)
2018-08-06 14:14:27 -05:00
Terry Brady
f159c70d1c modify method signature 2018-08-06 12:09:56 -07:00
Terry Brady
1ab41bb4d1 checkstyle fix 2018-08-06 12:06:01 -07:00
Terry Brady
b9a7204420 port 1848 to master 2018-08-06 12:02:05 -07:00
Terry Brady
5437f7f8b9 port pr1890 minus jspui 2018-08-06 11:43:45 -07:00
Terry Brady
d396cf3490 checkstyle fix 2018-08-06 11:37:25 -07:00
Terry Brady
0b33907f5e port pr 1910 2018-08-06 11:33:49 -07:00
Terry Brady
0ced11c0af port pr1973 2018-08-06 11:25:22 -07:00
Terry Brady
33edc9530e checkstyle fix 2018-08-06 11:07:58 -07:00
Terry Brady
06ca8809e1 port PR 1867 2018-08-06 10:17:52 -07:00
Terry Brady
15d100050c port 1941 2018-08-03 23:28:33 -07:00
Terry Brady
ebb738e3e2 port pr1891 2018-08-03 23:24:06 -07:00
Terry Brady
9747d051a5 fix checkstyle 2018-08-03 23:18:18 -07:00
Terry Brady
7af1958d7e port pr1883 2018-08-03 23:12:38 -07:00
Terry Brady
fe648a678e port pr1835 2018-08-03 22:57:27 -07:00
Terry Brady
dfd0250ac1 port pr1770 2018-08-03 22:27:32 -07:00
Terry Brady
32399ac00b port pr 1779 2018-08-03 22:23:07 -07:00
Terry Brady
9bcaa07bce checkstyle fix 2018-08-03 22:16:53 -07:00
Terry Brady
c1590ce15e port pr2069 2018-08-03 17:53:03 -07:00
Terry Brady
220c8a5a48 checkstyle fix 2018-08-03 17:47:31 -07:00
Terry Brady
58855cd632 port pr2045 2018-08-03 17:40:56 -07:00
Terry Brady
77d67537f7 port pr1839 minus xmlui 2018-08-03 17:15:23 -07:00
Terry Brady
3eaaa5bb96 port pr1730 minus xmlui 2018-08-03 16:50:22 -07:00
Pablo Prieto
cb7c2519a3 Merge branch 'DS-3910' of https://github.com/ppmdo/DSpace into DS-3910 2018-08-03 16:56:06 -05:00
Pablo Prieto
ef67916563 Added JavaDoc for createTemplateItem 2018-08-03 16:54:56 -05:00
Tim Donohue
3eb16056c6 Merge pull request #2109 from atmire/DS-3489_Search-REST-query-filter-operator-and-facet-prefix
DS-3489: Search API query filter operator and facet prefix
2018-08-03 15:20:52 -05:00
Pablo
c076c99686 Merge branch 'master' into DS-3910 2018-08-02 11:15:11 -05:00
Terry Brady
5ba8bdd42b usage comment for docker local.cfg 2018-08-01 19:03:09 -07:00
Terry Brady
8f61b926bc move Dockerfile 2018-08-01 15:59:31 -07:00
Oliver Goldschmidt
b00bae2c23 improves error display for disabled opensearch and invalid format 2018-08-01 11:13:13 +02:00
Raf Ponsaerts
5b717bfa35 [DS-3489] added and finalized javadocs 2018-07-27 13:11:16 +02:00
Terry Brady
308dbe95e2 add usage 2018-07-25 17:58:15 -07:00
Terry Brady
27545eb088 DSpace 7 Docker migrate 2018-07-25 16:49:06 -07:00
Mark H. Wood
628c1250c2 Merge pull request #2031 from mwoodiupui/DS-3895
[DS-3895] Bitstream size can't be referenced in HQL queries
2018-07-25 11:54:08 -04:00
Oliver Goldschmidt
f3bdbe43cf support enable/disable of OpenSearch module from config file 2018-07-23 10:53:50 +02:00
Oliver Goldschmidt
d903daadce turn on opensearch in local.cfg, adopts new URL path in default configuration 2018-07-23 10:17:18 +02:00
Oliver Goldschmidt
be02e510e2 Merge branch 'master' of https://github.com/DSpace/DSpace into dspace7_opensearch 2018-07-20 19:35:41 +02:00
Oliver Goldschmidt
38f094caa8 adds rpp and start parameter 2018-07-20 19:28:46 +02:00
Raf Ponsaerts
bfac8d1d4b [DS-3489] adding javadocs and addressed a few issues 2018-07-19 15:50:40 +02:00
Oliver Goldschmidt
26d6ca9bac adds first draft of OpenSearchContoller 2018-07-19 12:05:19 +02:00
Andrea Bollini
92845557ff Provide javadoc for public and reusable methods 2018-07-19 12:01:05 +02:00
Andrea Bollini
9214ffa732 Create EPerson (draft) 2018-07-19 11:16:58 +02:00
Andrea Bollini
38a6e7ad9b Implement the delete eperson method 2018-07-19 11:16:58 +02:00
Andrea Bollini
9f2ca2633f IT for delete eperson 2018-07-19 11:16:58 +02:00
Andrea Bollini
ac342b4819 Implement the search methods 2018-07-19 11:16:58 +02:00
Andrea Bollini
a2569fe905 Declare eperson search methods mandatory parameters 2018-07-19 11:16:58 +02:00
Andrea Bollini
e248893e2f Add eperson search methods 2018-07-19 11:16:58 +02:00
Andrea Bollini
353eed6f60 Add IT for eperson search methods 2018-07-19 11:16:58 +02:00
Andrea Bollini
f807cc38a3 Fix authz issue on epersons endpoint 2018-07-19 11:16:58 +02:00
Andrea Bollini
516d6af6f4 Highlight authz issue on epersons endpoint 2018-07-19 11:16:50 +02:00
Pablo Prieto
e55846b212 Minor changes based on review. 2018-07-18 11:41:03 -05:00
Tim Donohue
8fb56be144 Merge pull request #1931 from atmire/DS-3782_upgrade-commons-collections-to-version-4
DS-3782 upgrade commons collections to version 4
2018-07-18 11:01:18 -05:00
Tim Donohue
2ede483e2b Add comment to POM about v3 dependency 2018-07-18 10:34:14 -05:00
Tom Desair
5a9edd1b70 DS-3782: Compilation error 2018-07-18 17:12:04 +02:00
Tom Desair
de33709364 DS-3782: Restore lines that should not be modified 2018-07-18 16:44:41 +02:00
Raf Ponsaerts
8ffbec58ca [DS-3782] Upgrade Commons Collections to version 4 2018-07-18 15:53:11 +02:00
Oliver Goldschmidt
eefb535214 setting DSpace classes to debug mode by default 2018-07-18 10:30:16 +02:00
Andrea Bollini
eb2b4184ef Merge pull request #2064 from mspalti/DS-3909
DS-3909: Adding withdraw, reinstate, and isDiscoverable support to item repository.
2018-07-18 00:01:05 +02:00
Andrea Bollini
c8a08e4bb9 Code clean & include test about authz issues 2018-07-17 19:33:57 +02:00
Andrea Bollini
6b78459b68 Avoid to hide the internal exceptions 2018-07-17 19:32:44 +02:00
Andrea Bollini
7328d49962 Introduce support for withdrawn in ItemBuilder 2018-07-17 19:31:41 +02:00
Andrea Bollini
bf9d63c363 Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3909 2018-07-17 17:29:55 +02:00
Pablo Prieto
23c2f1a7ca Style corrections 2018-07-12 15:50:22 -05:00
Pablo Prieto
6eb2829b4a Amend 2018-07-12 13:28:20 -05:00
Pablo Prieto
005535fcbf Revert "Removed local configurations"
This reverts commit 71347a3faf.
2018-07-12 13:13:52 -05:00
Pablo Prieto
b38aa1723c Merge branch 'DS-3910' of https://github.com/ppmdo/DSpace into DS-3910 2018-07-12 12:55:36 -05:00
Pablo Prieto
71347a3faf Removed local configurations 2018-07-12 12:54:29 -05:00
Pablo Prieto
7b6ce941df Moved createTemplateItem method to CollectionBuilder class.
Correction: WorkspaceItemBuilder extends AbstractBuiilder
2018-07-12 12:46:46 -05:00
Tim Donohue
25a83f3b3f Merge pull request #2025 from 4Science/DS-3892
DS-3892 add the findBySchema name method
2018-07-12 08:58:10 -05:00
Tim Donohue
82de7bb125 Merge pull request #2107 from KingKrimmson/DS-3939-master
DS-3939 OAI-Harvester, skip item and continue if handle is missing
2018-07-09 09:56:21 -05:00
Tom Desair
91218ed36c DS-3489: Fixing tests after rebase part 2 2018-07-04 14:22:09 +02:00
Pablo Prieto
9ab8904bf2 WorskspaceItemBuilder amend 2018-07-02 16:25:36 -05:00
Chris Herron
ddbe928cd3 DS-3939 OAI-Harvester, skip item and continue if handle is missing 2018-07-02 12:45:57 -04:00
Tom Desair
ad1a3310f0 DS-3489: Fixing tests after rebase part 1 2018-07-02 17:31:02 +02:00
Pablo Prieto
fe2030d024 Working Implementation
Wrote additional integration tests
Created WorkspaceItemBuilder
2018-06-29 16:51:11 -05:00
Tom Desair
3dc2024265 DS-3489: Fix test 2018-06-29 17:18:31 +02:00
Tom Desair
4acde3df79 DS-3489: Integration test for facet prefix 2018-06-29 17:01:26 +02:00
Raf Ponsaerts
db0a3cd756 [DS-3489] finished implementing the search query filter operator. Also fixed a not equals issue in standard dspace 2018-06-29 16:42:52 +02:00
Tom Desair
4d0382766c DS-3489: Add support for facet prefix 2018-06-29 16:35:03 +02:00
Raf Ponsaerts
b805baddf5 [DS-3489] currently have most of the functionality, need to export the enum to xml still 2018-06-29 16:34:17 +02:00
Raf Ponsaerts
e98af34e21 [DS-3489] currently have most of the functionality, need to export the enum to xml still 2018-06-29 16:31:22 +02:00
Pablo Prieto
a487fb7d58 Working method to DELETE on /api/core/items
Wrote integration tests
2018-06-28 19:12:24 -05:00
Terry Brady
06ed982031 Merge pull request #1974 from atmire/DS-3819_Return-303-when-Shibboleth-redirect-is-needed
DS-3819: Return HTTP redirect when login URL is supported
2018-06-28 09:57:42 -06:00
Tom Desair
a921a86422 DS-3819: Test fixes 2018-06-28 14:46:02 +02:00
Yana De Pauw
5a1202222f DS-3819: Add button for authentication method with location parameter when multiple authentication methods are present 2018-06-28 13:21:58 +02:00
Tom Desair
55f5571e1d [DS-3819] Revert back to 401 status and move login URL to WWW-Authenticate header 2018-06-28 13:21:57 +02:00
Tom Desair
93549cab24 DS-3819: Correct response code and added Shibboleth Special Groups test 2018-06-28 13:21:54 +02:00
Tom Desair
ecce5fe9b2 DS-3819: Return HTTP redirect when login URL is supported 2018-06-28 13:21:09 +02:00
Pablo Prieto
3a51c7a962 Added method 2018-06-27 19:03:25 -05:00
Andrea Bollini
7d03da0fea DS-3892 refactor common code and align IT class name with our convention 2018-06-22 09:52:04 +02:00
Andrea Bollini
43eda56269 Merge pull request #2077 from 4Science/DS-3838
DS-3838 return 204 for successful logout
2018-06-21 16:56:17 +02:00
Andrea Bollini
f00d31e370 Align comment with the actual code/expectation 2018-06-21 16:05:36 +02:00
Andrea Bollini
b15a0027b7 Merge pull request #2073 from 4Science/minor-fixes
DS-3923 manage null return in Search Methods
2018-06-21 15:51:43 +02:00
Michael W Spalti
52401d0719 [DS-3909] Implements ItemRepository patch method for withdrawal and discoverable (using a proposed helper class).
[DS-3909] Added base class and item implementation for patch requests.

[DS-3090] Added new Spring component for the ItemPatch implementation and autowired it into ItemRestRepository.

[DS-3909] Added new exception for item operation path and corrected a few exception messages.

[DS-3909] Modified integration tests and test builders.

[DS-3909] Integration tests for withdraw and reinstate would not complete without adding REMOVE, ADD, and WRITE permissions to the collection and item mocks.  New builder methods added to set permissions.

[DS-3909] Some cleanup in integration test builder methods.

[DS-3090] Removed unused imports.

[DS-3909] Corrections to patch operation paths and values.

[DS-3909] Updates to exceptions and code documentation.

[DS-3090] Added test for missing value in withdrawal request.

[D-3909] Added exception for missing boolean value in discoverable operations.

[DS-3909] Corrected formatting error.

[DS-3909] Changed default error response for operation processing in AbstractResourcePatch to PatchBadRequestException.

[DS-3909] Code comments and javadoc.

[DS-3909] Minor changes to method visibility and documentation.

[DS-3909] Added copy operation to mirror JsonPatchConverter and change visibility of the sub-class replace method.

[DS-3909] Updating PR change requests.

[DS-3909] Minor corrections to ItemPatch.

[3909] Javadoc updates.

[DS-3909] Removed test code that is not necessary when tests are ran as admin user.

Also added ResourceNotFoundException to controller patch method.

[DS-3909] Minor formating correction.

[DS-3909] Removed unused field from ItemBuilder.
2018-06-16 14:43:51 -07:00
Andrea Bollini
40fc109c7c DS-3838 return 204 for successful logout 2018-06-09 21:29:22 +02:00
Andrea Bollini
d5d2fe0e68 Complete ITs and implementation 2018-06-09 18:06:04 +02:00
Andrea Bollini
18046604a7 Improve support for Parameter in search methods 2018-06-09 18:05:04 +02:00
Andrea Bollini
bc2d9e0706 Fix findBySchema implementation for undefined schema 2018-06-09 16:34:24 +02:00
Andrea Bollini
7f68244c4e Add test for search methods 2018-06-09 16:34:24 +02:00
Andrea Bollini
6b74ef5544 Add priority to force cleanup in a specfic order 2018-06-09 16:34:24 +02:00
Andrea Bollini
a87baf6edb Fix checkstyle issues 2018-06-09 16:34:24 +02:00
Andrea Bollini
7e6d1a58ec DS-3892 add the findBySchema name method 2018-06-09 16:34:23 +02:00
Andrea Bollini
6af5349e74 DS-3923 manage null return in Search Methods 2018-06-01 07:13:08 +02:00
Andrea Bollini
af758212f7 Cleanup license header 2018-06-01 07:13:08 +02:00
Andrea Bollini
a6c1113165 Exclude rebel.xml configuration from license:check 2018-05-31 21:11:51 +02:00
Andrea Bollini
27ee3a13f0 Fix NPE when a valid JWT related to a deleted eperson is presented 2018-05-31 21:11:51 +02:00
Andrea Bollini
7157c43bc1 Fix admin eperson in test 2018-05-31 21:11:36 +02:00
Michael W Spalti
416fc80010 [DS-3909] Corrects UUID bug in RestResourceController. 2018-05-30 13:35:41 -07:00
Raf Ponsaerts
d1f8c56be5 Fixed a bug which caused the query OR without quotes to crash 2018-05-18 11:56:11 +02:00
Martin Walk
388858de14 Adapt integration tests to added search filters 2018-05-15 10:43:06 +02:00
Tim Donohue
e6d5254eff Temporary POM exclusion to avoid dependency conflict 2018-05-14 16:39:15 -05:00
Raf Ponsaerts
5c7e5ee4e2 Added the tests for the facet min/max values on the search/xx endpoint 2018-05-14 16:18:55 +02:00
Martin Walk
e29b72d228 Trigger Travis build 2018-05-14 15:06:07 +02:00
Martin Walk
5970b3ccfa Please the Checkstyle plugin 2018-05-14 14:41:58 +02:00
Martin Walk
5f52704ce5 Try to please the Checkstyle plugin :) 2018-05-14 14:18:21 +02:00
Raf Ponsaerts
7acaa69c95 Implemented the expose min-max on the facets in the search/xx endpoints 2018-05-14 13:50:15 +02:00
Martin Walk
3b62f4de86 Port pull request #1838 to master 2018-05-14 13:48:17 +02:00
Raf Ponsaerts
6b57003e34 Working on the min-max facet implementation 2018-05-14 08:29:52 +02:00
Tim Donohue
032a76ddb8 DS-3447: ORCID v2 integration (port to master from PR#2039) 2018-05-10 16:38:10 -05:00
Tom Desair
e62d63f343 DS-3489: Fix bugs to address test failures 2018-04-30 11:53:51 +02:00
Tom Desair
f2e2d01560 DS-3489: Improvements on /search/discover/facets 2018-04-30 11:53:36 +02:00
Tom Desair
b0e7994520 DS-3489: Fix search result object pagination links 2018-04-30 11:53:36 +02:00
Raf Ponsaerts
bc581e64e0 [DS-3489] updated the /api/discover/search endpoint to be more compatible with angular's expectations 2018-04-30 11:53:27 +02:00
Raf Ponsaerts
519f0a812c [DS-3489] added the facetLimit property to the buildFacetQueries for dates, reversed the order of the returned list to show the oldest pair first 2018-04-30 11:48:00 +02:00
Raf Ponsaerts
e36ded0244 [DS-3489] updated the /api/discover/search endpoint to be more compatible with angular's expectations 2018-04-30 11:47:59 +02:00
Mark H. Wood
24575d8ba1 [DS-3895] Rename Bitstream.getSize() to getSizeBytes(). 2018-04-23 13:29:31 -04:00
Mohamed Mohideen Abdul Rasheed
6d1b695ca5 Minor fix on CommunityServiceImpl.java
Update methods should call clearModified.
2018-03-29 15:46:02 -04:00
Terry W Brady
287cc56476 Port 6x to 7x 2017-07-14 09:46:23 -07:00
1142 changed files with 49878 additions and 19289 deletions

6
.dockerignore Normal file
View File

@@ -0,0 +1,6 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*

View File

@@ -26,19 +26,20 @@ before_install:
# Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
# Two stage Build and Test
# 1. Install & Unit Test APIs
# 2. Assemble DSpace
# Build DSpace and run both Unit and Integration Tests
script:
# 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
# Summary of flags used (below):
# license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
# -DskipITs=false => Enable DSpace Integration Tests
# -P !assembly => Skip normal assembly (as it can be memory intensive)
# -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
# -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
# 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
# -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
- "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
# After a successful build and test (see 'script'), send code coverage reports to coveralls.io
# These code coverage reports are generated by jacoco-maven-plugin (during test process above).
after_success:
# Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin)
- "cd dspace && mvn verify -P coveralls"

24
Dockerfile.dependencies Normal file
View File

@@ -0,0 +1,24 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
# Step 1 - Run Maven Build
FROM maven:3-jdk-8 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
RUN useradd dspace \
&& mkdir /home/dspace \
&& chown -Rv dspace: /home/dspace
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Trigger the installation of all maven dependencies
# Clean up the built artifacts in the same step to keep the docker image small
RUN mvn package && mvn clean
# Clear the contents of the /app directory so no artifacts are left when dspace:dspace is built
USER root
RUN rm -rf /app/*

63
Dockerfile.jdk8 Normal file
View File

@@ -0,0 +1,63 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK8 compatible
# - tomcat:8-jre8
# - ANT 1.10.5
# - maven:3-jdk-8
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-jdk8
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.5
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the the build directory contents
FROM tomcat:8-jre8
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/spring-rest /usr/local/tomcat/webapps/spring-rest && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2

69
Dockerfile.jdk8-test Normal file
View File

@@ -0,0 +1,69 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK8 compatible
# - tomcat:8-jre8
# - ANT 1.10.5
# - maven:3-jdk-8
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-jdk8-test
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.5
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the the build directory contents
FROM tomcat:8-jre8
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/spring-rest /usr/local/tomcat/webapps/spring-rest && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2
COPY dspace/src/main/docker/test/solr_web.xml $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml && \
sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml

View File

@@ -366,7 +366,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
* A Hibernate O/RM Module (org.hibernate:hibernate-ehcache:4.2.21.Final - http://hibernate.org)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:4.0.2.Final - http://hibernate.org)

View File

@@ -40,6 +40,9 @@ Please be aware that, as a Java web application, DSpace requires a database (Pos
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.
## Dockerfile Usage
See the [DSpace Docker Tutorial](https://dspace-labs.github.io/DSpace-Docker-Images/).
## Contributing
DSpace is a community built and supported project. We do not have a centralized development or support team,
@@ -76,6 +79,57 @@ install, upgrade, customize or host DSpace, then we recommend getting in touch w
The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary
## Testing
### Running Tests
By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
run automatically by [Travis CI](https://travis-ci.org/DSpace/DSpace/) for all Pull Requests and code commits.
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
```
# NOTE: while "mvn test" runs Unit Tests,
# Integration Tests only run for "verify" or "install" phases
mvn clean install -Dmaven.test.skip=false -DskipITs=false
```
* How to run just Unit Tests:
```
mvn clean test -Dmaven.test.skip=false
```
* How to run a *single* Unit Test
```
# Run all tests in a specific test class
# NOTE: testClassName is just the class name, do not include package
mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]
# Run one test method in a specific test class
mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]#[testMethodName]
```
* How to run Integration Tests (requires running Unit tests too)
```
mvn clean verify -Dmaven.test.skip=false -DskipITs=false
```
* How to run a *single* Integration Test (requires running Unit tests too)
```
# Run all integration tests in a specific test class
# NOTE: Integration Tests only run for "verify" or "install" phases
# NOTE: testClassName is just the class name, do not include package
mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]
# Run one test method in a specific test class
mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]#[testMethodName]
```
* How to run only tests of a specific DSpace module
```
# Before you can run only one module's tests, other modules may need installing into your ~/.m2
cd [dspace-src]
mvn clean install
# Then, move into a module subdirectory, and run the test command
cd [dspace-src]/dspace-spring-rest
# Choose your test command from the lists above
```
## License
DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).

View File

@@ -1,5 +1,4 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
@@ -13,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>7.0-SNAPSHOT</version>
<version>7.0-preview-1</version>
<relativePath>..</relativePath>
</parent>
@@ -51,6 +50,10 @@
<configuration>
<debug>true</debug>
<showDeprecation>true</showDeprecation>
<compilerArguments>
<processor>org.hibernate.jpamodelgen.JPAMetaModelEntityProcessor</processor>
</compilerArguments>
</configuration>
</plugin>
<plugin>
@@ -81,6 +84,7 @@
<exclude>**/src/test/resources/**</exclude>
<exclude>**/src/test/data/**</exclude>
<exclude>**/.gitignore</exclude>
<exclude>**/src/main/resources/rebel.xml</exclude>
<exclude>src/test/data/dspaceFolder/config/spiders/**</exclude>
<exclude>src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java</exclude>
</excludes>
@@ -307,6 +311,10 @@
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator-cdi</artifactId>
@@ -334,10 +342,6 @@
<groupId>org.dspace</groupId>
<artifactId>mets</artifactId>
</dependency>
<dependency>
<groupId>org.dspace.dependencies</groupId>
<artifactId>dspace-tm-extractors</artifactId>
</dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
@@ -351,6 +355,14 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -361,10 +373,6 @@
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
@@ -383,8 +391,8 @@
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
@@ -400,7 +408,7 @@
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<artifactId>javax.servlet-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
@@ -418,8 +426,16 @@
<artifactId>jdom</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</dependency>
<dependency>
<groupId>oro</groupId>
@@ -532,6 +548,10 @@
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -549,6 +569,10 @@
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -562,7 +586,7 @@
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>${solr.version}</version>
<version>${solr.client.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
@@ -576,8 +600,8 @@
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-configuration2</artifactId>
</dependency>
<dependency>
<groupId>com.maxmind.geoip2</groupId>
@@ -597,7 +621,7 @@
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>4.10.4</version>
<version>${solr.client.version}</version>
</dependency>
<dependency>
@@ -616,7 +640,6 @@
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>19.0</version>
</dependency>
@@ -719,20 +742,19 @@
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- S3 also wanted jackson... -->
<!-- For ORCID v2 integration -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<groupId>org.dspace</groupId>
<artifactId>orcid-jaxb-api</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20180130</version>
</dependency>
</dependencies>

View File

@@ -17,7 +17,7 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
@@ -180,14 +180,10 @@ public class CommunityFiliator {
// second test - circularity: parent's parents can't include proposed
// child
List<Community> parentDads = parent.getParentCommunities();
for (int i = 0; i < parentDads.size(); i++) {
if (parentDads.get(i).getID().equals(child.getID())) {
System.out
.println("Error, circular parentage - child is parent of parent");
if (parentDads.contains(child)) {
System.out.println("Error, circular parentage - child is parent of parent");
System.exit(1);
}
}
// everthing's OK
communityService.addSubcommunity(c, parent, child);
@@ -210,26 +206,15 @@ public class CommunityFiliator {
throws SQLException, AuthorizeException, IOException {
// verify that child is indeed a child of parent
List<Community> parentKids = parent.getSubcommunities();
boolean isChild = false;
for (int i = 0; i < parentKids.size(); i++) {
if (parentKids.get(i).getID().equals(child.getID())) {
isChild = true;
break;
}
}
if (!isChild) {
System.out
.println("Error, child community not a child of parent community");
if (!parentKids.contains(child)) {
System.out.println("Error, child community not a child of parent community");
System.exit(1);
}
// OK remove the mappings - but leave the community, which will become
// top-level
child.getParentCommunities().remove(parent);
parent.getSubcommunities().remove(child);
child.removeParentCommunity(parent);
parent.removeSubCommunity(child);
communityService.update(c, child);
communityService.update(c, parent);

View File

@@ -15,7 +15,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;

View File

@@ -21,6 +21,7 @@ import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
@@ -248,7 +249,7 @@ public class MetadataImporter {
// If the schema is not provided default to DC
if (schema == null) {
schema = MetadataSchema.DC_SCHEMA;
schema = MetadataSchemaEnum.DC.getName();
}

View File

@@ -17,7 +17,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
@@ -47,7 +47,7 @@ public class RegistryLoader {
/**
* log4j category
*/
private static Logger log = Logger.getLogger(RegistryLoader.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
.getBitstreamFormatService();

View File

@@ -21,8 +21,10 @@ import javax.xml.transform.TransformerException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
@@ -45,6 +47,7 @@ import org.xml.sax.SAXException;
* an XML file.
*
* The XML file structure needs to be:
* <p>
* {@code
* <import_structure>
* <community>
@@ -56,29 +59,31 @@ import org.xml.sax.SAXException;
* </community>
* </import_structure>
* }
* it can be arbitrarily deep, and supports all the metadata elements
* <p>
* It can be arbitrarily deep, and supports all the metadata elements
* that make up the community and collection metadata. See the system
* documentation for more details
* documentation for more details.
*
* @author Richard Jones
*/
public class StructBuilder {
/**
* the output xml document which will contain updated information about the
* imported structure
* The output XML document which will contain updated information about the
* imported structure.
*/
private static org.jdom.Document xmlOutput = new org.jdom.Document(new Element("imported_structure"));
private static final org.jdom.Document xmlOutput
= new org.jdom.Document(new Element("imported_structure"));
/**
* a hashtable to hold metadata for the collection being worked on
* A hash table to hold metadata for the collection being worked on.
*/
private static Map<String, String> collectionMap = new HashMap<String, String>();
private static final Map<String, String> collectionMap = new HashMap<>();
/**
* a hashtable to hold metadata for the community being worked on
* A hash table to hold metadata for the community being worked on.
*/
private static Map<String, String> communityMap = new HashMap<String, String>();
private static final Map<String, String> communityMap = new HashMap<>();
protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
@@ -101,19 +106,34 @@ public class StructBuilder {
* with the handle for each imported item added as an attribute.
*
* @param argv the command line arguments given
* @throws Exception if an error occurs
* @throws ParserConfigurationException passed through.
* @throws SQLException passed through.
*/
public static void main(String[] argv)
throws Exception {
CommandLineParser parser = new PosixParser();
throws ParserConfigurationException, SQLException {
CommandLineParser parser = new DefaultParser();
Options options = new Options();
options.addOption("f", "file", true, "file");
options.addOption("h", "help", false, "help");
options.addOption("?", "help");
options.addOption("f", "file", true, "input structure document");
options.addOption("e", "eperson", true, "eperson");
options.addOption("o", "output", true, "output");
options.addOption("o", "output", true, "output structure document");
CommandLine line = parser.parse(options, argv);
CommandLine line = null;
try {
line = parser.parse(options, argv);
} catch (ParseException ex) {
System.err.println(ex.getMessage());
usage(options);
System.exit(1);
}
if (line.hasOption('h') || line.hasOption('?')) {
usage(options);
System.exit(0);
}
String file = null;
String eperson = null;
@@ -132,22 +152,41 @@ public class StructBuilder {
}
if (output == null || eperson == null || file == null) {
usage();
System.exit(0);
usage(options);
System.exit(1);
}
// create a context
Context context = new Context();
// set the context
try {
context.setCurrentUser(ePersonService.findByEmail(context, eperson));
} catch (SQLException ex) {
System.err.format("That user could not be found: %s%n", ex.getMessage());
System.exit(1);
}
// load the XML
Document document = loadXML(file);
Document document = null;
try {
document = loadXML(file);
} catch (IOException ex) {
System.err.format("The input document could not be read: %s%n", ex.getMessage());
System.exit(1);
} catch (SAXException ex) {
System.err.format("The input document could not be parsed: %s%n", ex.getMessage());
System.exit(1);
}
// run the preliminary validation, to be sure that the the XML document
// is properly structured
try {
validate(document);
} catch (TransformerException ex) {
System.err.format("The input document is invalid: %s%n", ex.getMessage());
System.exit(1);
}
// load the mappings into the member variable hashmaps
communityMap.put("name", "name");
@@ -164,60 +203,69 @@ public class StructBuilder {
collectionMap.put("license", "license");
collectionMap.put("provenance", "provenance_description");
Element[] elements = new Element[]{};
try {
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
// run the import starting with the top level communities
Element[] elements = handleCommunities(context, first, null);
elements = handleCommunities(context, first, null);
} catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1);
} catch (AuthorizeException ex) {
System.err.format("Not authorized: %s%n", ex.getMessage());
System.exit(1);
}
// generate the output
Element root = xmlOutput.getRootElement();
for (int i = 0; i < elements.length; i++) {
root.addContent(elements[i]);
for (Element element : elements) {
root.addContent(element);
}
// finally write the string into the output file
try {
BufferedWriter out = new BufferedWriter(new FileWriter(output));
try (BufferedWriter out = new BufferedWriter(new FileWriter(output));) {
out.write(new XMLOutputter().outputString(xmlOutput));
out.close();
} catch (IOException e) {
System.out.println("Unable to write to output file " + output);
System.exit(0);
System.exit(1);
}
context.complete();
}
/**
* Output the usage information
* Output the usage information.
*/
private static void usage() {
System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>");
System.out.println(
"Communities will be created from the top level, and a map of communities to handles will be returned in " +
"the output file");
return;
private static void usage(Options options) {
HelpFormatter helper = new HelpFormatter();
helper.printHelp("java StructBuilder -f <source XML file> -o <output file> -e <eperson email>",
"Load community/collection structure from a file.",
options,
"Communities will be created from the top level,"
+ " and a map of communities to handles will be returned"
+ " in the output file.");
}
/**
* Validate the XML document. This method does not return, but if validation
* fails it generates an error and ceases execution
* Validate the XML document. This method returns if the document is valid.
* If validation fails it generates an error and ceases execution.
*
* @param document the XML document object
* @throws TransformerException if transformer error
*/
private static void validate(org.w3c.dom.Document document)
throws TransformerException {
StringBuffer err = new StringBuffer();
StringBuilder err = new StringBuilder();
boolean trip = false;
err.append("The following errors were encountered parsing the source XML\n");
err.append("No changes have been made to the DSpace instance\n\n");
err.append("The following errors were encountered parsing the source XML.\n");
err.append("No changes have been made to the DSpace instance.\n\n");
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
if (first.getLength() == 0) {
err.append("-There are no top level communities in the source document");
err.append("-There are no top level communities in the source document.");
System.out.println(err.toString());
System.exit(0);
}
@@ -236,7 +284,7 @@ public class StructBuilder {
/**
* Validate the communities section of the XML document. This returns a string
* containing any errors encountered, or null if there were no errors
* containing any errors encountered, or null if there were no errors.
*
* @param communities the NodeList of communities to validate
* @param level the level in the XML document that we are at, for the purposes
@@ -246,7 +294,7 @@ public class StructBuilder {
*/
private static String validateCommunities(NodeList communities, int level)
throws TransformerException {
StringBuffer err = new StringBuffer();
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
@@ -255,8 +303,9 @@ public class StructBuilder {
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level " + level + " community in position " + pos);
err.append(" does not contain exactly one name field\n");
err.append("-The level ").append(level)
.append(" community in position ").append(pos)
.append(" does not contain exactly one name field.\n");
trip = true;
}
@@ -286,7 +335,7 @@ public class StructBuilder {
/**
* validate the collection section of the XML document. This generates a
* string containing any errors encountered, or returns null if no errors
* string containing any errors encountered, or returns null if no errors.
*
* @param collections a NodeList of collections to validate
* @param level the level in the XML document for the purposes of error reporting
@@ -294,7 +343,7 @@ public class StructBuilder {
*/
private static String validateCollections(NodeList collections, int level)
throws TransformerException {
StringBuffer err = new StringBuffer();
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
@@ -303,8 +352,9 @@ public class StructBuilder {
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level " + level + " collection in position " + pos);
err.append(" does not contain exactly one name field\n");
err.append("-The level ").append(level)
.append(" collection in position ").append(pos)
.append(" does not contain exactly one name field.\n");
trip = true;
}
}
@@ -363,7 +413,7 @@ public class StructBuilder {
* created communities (e.g. the handles they have been assigned)
*/
private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
throws TransformerException, SQLException, Exception {
throws TransformerException, SQLException, AuthorizeException {
Element[] elements = new Element[communities.getLength()];
for (int i = 0; i < communities.getLength(); i++) {
@@ -390,12 +440,10 @@ public class StructBuilder {
}
// FIXME: at the moment, if the community already exists by name
// then this will throw a PSQLException on a duplicate key
// violation
// Ideally we'd skip this row and continue to create sub
// communities
// and so forth where they don't exist, but it's proving
// difficult
// then this will throw an SQLException on a duplicate key
// violation.
// Ideally we'd skip this row and continue to create sub communities
// and so forth where they don't exist, but it's proving difficult
// to isolate the community that already exists without hitting
// the database directly.
communityService.update(context, community);
@@ -470,7 +518,7 @@ public class StructBuilder {
* created collections (e.g. the handle)
*/
private static Element[] handleCollections(Context context, NodeList collections, Community parent)
throws TransformerException, SQLException, AuthorizeException, IOException, Exception {
throws TransformerException, SQLException, AuthorizeException {
Element[] elements = new Element[collections.getLength()];
for (int i = 0; i < collections.getLength(); i++) {

View File

@@ -27,6 +27,7 @@ import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
@@ -34,6 +35,7 @@ import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
@@ -198,10 +200,12 @@ public class DSpaceCSV implements Serializable {
}
// Check that the scheme exists
if (!StringUtils.equals(metadataSchema, MetadataSchemaEnum.RELATION.getName())) {
MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
if (foundSchema == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.SCHEMA,
MetadataImportInvalidHeadingException
.SCHEMA,
columnCounter);
}
@@ -210,9 +214,11 @@ public class DSpaceCSV implements Serializable {
.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.ELEMENT,
MetadataImportInvalidHeadingException
.ELEMENT,
columnCounter);
}
}
// Store the heading
headings.add(authorityPrefix + element);

View File

@@ -15,6 +15,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
@@ -25,22 +26,31 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Entity;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.EntityService;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
@@ -50,6 +60,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.util.UUIDUtils;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
@@ -92,7 +103,7 @@ public class MetadataImport {
/**
* Logger
*/
protected static final Logger log = Logger.getLogger(MetadataImport.class);
protected static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataImport.class);
protected final AuthorityValueService authorityValueService;
@@ -101,6 +112,10 @@ public class MetadataImport {
protected final CollectionService collectionService;
protected final HandleService handleService;
protected final WorkspaceItemService workspaceItemService;
protected final RelationshipTypeService relationshipTypeService;
protected final RelationshipService relationshipService;
protected final EntityTypeService entityTypeService;
protected final EntityService entityService;
/**
* Create an instance of the metadata importer. Requires a context and an array of CSV lines
@@ -120,6 +135,10 @@ public class MetadataImport {
handleService = HandleServiceFactory.getInstance().getHandleService();
authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
entityService = ContentServiceFactory.getInstance().getEntityService();
}
/**
@@ -336,7 +355,17 @@ public class MetadataImport {
item = wsItem.getItem();
// Add the metadata to the item
List<BulkEditMetadataValue> relationships = new LinkedList<>();
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) {
if (StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) {
if (!StringUtils.equals(dcv.getElement(), "type")) {
relationships.add(dcv);
} else {
handleRelationshipMetadataValueFromBulkEditMetadataValue(item, dcv);
}
} else {
itemService.addMetadata(c, item, dcv.getSchema(),
dcv.getElement(),
dcv.getQualifier(),
@@ -345,7 +374,11 @@ public class MetadataImport {
dcv.getAuthority(),
dcv.getConfidence());
}
}
for (BulkEditMetadataValue relationship : relationships) {
handleRelationshipMetadataValueFromBulkEditMetadataValue(item, relationship);
}
// Should the workflow be used?
if (useWorkflow) {
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
@@ -396,6 +429,27 @@ public class MetadataImport {
return changes;
}
/**
* This metod handles the BulkEditMetadataValue objects that correspond to Relationship metadatavalues
* @param item The item to which this metadatavalue will belong
* @param dcv The BulkEditMetadataValue to be processed
* @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong
*/
private void handleRelationshipMetadataValueFromBulkEditMetadataValue(Item item, BulkEditMetadataValue dcv)
throws SQLException, AuthorizeException {
LinkedList<String> values = new LinkedList<>();
values.add(dcv.getValue());
LinkedList<String> authorities = new LinkedList<>();
authorities.add(dcv.getAuthority());
LinkedList<Integer> confidences = new LinkedList<>();
confidences.add(dcv.getConfidence());
handleRelationMetadata(c, item, dcv.getSchema(), dcv.getElement(),
dcv.getQualifier(),
dcv.getLanguage(), values, authorities, confidences);
}
/**
* Compare an item metadata with a line from CSV, and optionally update the item
*
@@ -583,9 +637,251 @@ public class MetadataImport {
}
}
// Set those values
if (StringUtils.equals(schema, MetadataSchemaEnum.RELATION.getName())) {
List<RelationshipType> relationshipTypeList = relationshipTypeService
.findByLeftOrRightLabel(c, element);
for (RelationshipType relationshipType : relationshipTypeList) {
for (Relationship relationship : relationshipService
.findByItemAndRelationshipType(c, item, relationshipType)) {
relationshipService.delete(c, relationship);
relationshipService.update(c, relationship);
}
}
handleRelationMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
} else {
itemService.clearMetadata(c, item, schema, element, qualifier, language);
itemService.addMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
itemService.addMetadata(c, item, schema, element, qualifier,
language, values, authorities, confidences);
itemService.update(c, item);
}
}
}
/**
* This method decides whether the metadatavalue is of type relation.type or if it corresponds to
* a relationship and handles it accordingly to their respective methods
* @param c The relevant DSpace context
* @param item The item to which this metadatavalue belongs to
* @param schema The schema for the metadatavalue
* @param element The element for the metadatavalue
* @param qualifier The qualifier for the metadatavalue
* @param language The language for the metadatavalue
* @param values The values for the metadatavalue
* @param authorities The authorities for the metadatavalue
* @param confidences The confidences for the metadatavalue
* @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong
*/
private void handleRelationMetadata(Context c, Item item, String schema, String element, String qualifier,
String language, List<String> values, List<String> authorities,
List<Integer> confidences) throws SQLException, AuthorizeException {
if (StringUtils.equals(element, "type") && StringUtils.isBlank(qualifier)) {
handleRelationTypeMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
} else {
for (String value : values) {
handleRelationOtherMetadata(c, item, element, value);
}
}
}
/**
 * This method takes the item, element and values to determine what relationships should be built
 * for these parameters and calls on the method to construct them.
 * It matches the given element against the left and right labels of all relationship types known
 * for the item's entity, collects the relationship types that are valid for both endpoints, and
 * only builds a relationship when exactly one acceptable type remains (ambiguity or no match is
 * logged and skipped).
 * @param c The relevant DSpace context
 * @param item The item that the relationships will be made for
 * @param element The string determining which relationshiptype is to be used
 * @param value The value for the relationship (expected to be the UUID of the related item;
 *              handle values are not supported and cause this method to return early)
 * @throws SQLException If something goes wrong
 * @throws AuthorizeException If something goes wrong
 */
private void handleRelationOtherMetadata(Context c, Item item, String element, String value)
throws SQLException, AuthorizeException {
Entity entity = entityService.findByItemId(c, item.getID());
// Tracks whether the current item ends up on the left side of the relationship.
boolean left = false;
List<RelationshipType> acceptableRelationshipTypes = new LinkedList<>();
String url = handleService.resolveToURL(c, value);
UUID uuid = UUIDUtils.fromString(value);
// Bail out when the value is not a UUID but does resolve as a handle: handle
// references are not supported here.
// NOTE(review): if value is neither a UUID nor a resolvable handle, uuid stays
// null and is passed to findByItemId below — confirm downstream null handling.
if (uuid == null && StringUtils.isNotBlank(url)) {
return;
}
Entity relationEntity = entityService.findByItemId(c, uuid);
List<RelationshipType> leftRelationshipTypesForEntity = entityService.getLeftRelationshipTypes(c, entity);
List<RelationshipType> rightRelationshipTypesForEntity = entityService.getRightRelationshipTypes(c, entity);
// Match the element name against each relationship type's labels; the matching
// side decides whether this item is the left or right endpoint.
for (RelationshipType relationshipType : entityService.getAllRelationshipTypes(c, entity)) {
if (StringUtils.equalsIgnoreCase(relationshipType.getLeftLabel(), element)) {
left = handleLeftLabelEqualityRelationshipTypeElement(c, entity, relationEntity, left,
acceptableRelationshipTypes,
leftRelationshipTypesForEntity,
relationshipType);
} else if (StringUtils.equalsIgnoreCase(relationshipType.getRightLabel(), element)) {
left = handleRightLabelEqualityRelationshipTypeElement(c, entity, relationEntity, left,
acceptableRelationshipTypes,
rightRelationshipTypesForEntity,
relationshipType);
}
}
// More than one candidate: the element name does not identify a unique type, so skip.
if (acceptableRelationshipTypes.size() > 1) {
log.error("Ambiguous relationship_types were found");
return;
}
// No candidate at all: nothing can be built for this element/value pair.
if (acceptableRelationshipTypes.size() == 0) {
log.error("no relationship_types were found");
return;
}
//There is exactly one
buildRelationObject(c, item, value, left, acceptableRelationshipTypes.get(0));
}
/**
 * Creates and persists the relationship between the given item and the item
 * identified by {@code value}, using the single relationship type that was
 * accepted for this pair.
 *
 * @param c                        the relevant DSpace context
 * @param item                     the item for which this relationship is constructed
 * @param value                    the identifier (UUID or legacy id) of the other item
 * @param left                     whether {@code item} is the left item of the relationship
 * @param acceptedRelationshipType the relationship type to create
 * @throws SQLException       if a database error occurs
 * @throws AuthorizeException if the current user may not create the relationship
 */
private void buildRelationObject(Context c, Item item, String value, boolean left,
                                 RelationshipType acceptedRelationshipType)
    throws SQLException, AuthorizeException {
    // Resolve which side the current item occupies; the other side is looked up by id.
    Item leftItem;
    Item rightItem;
    if (left) {
        leftItem = item;
        rightItem = itemService.findByIdOrLegacyId(c, value);
    } else {
        rightItem = item;
        leftItem = itemService.findByIdOrLegacyId(c, value);
    }
    // Append the new relationship after the existing ones on each side.
    int leftPlace = relationshipService.findLeftPlaceByLeftItem(c, leftItem) + 1;
    int rightPlace = relationshipService.findRightPlaceByRightItem(c, rightItem) + 1;
    Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem,
                                                                    acceptedRelationshipType, leftPlace, rightPlace);
    relationshipService.update(c, persistedRelationship);
}
/**
 * Adds the given RelationshipType to the acceptable list when it is valid for a
 * relationship in which this entity is the RIGHT endpoint: the entity's type must
 * match the relationship type's right type, the related entity's type must match
 * its left type, and at least one of the entity's right-side relationship types
 * must reference that left type. Returns the (possibly updated) left/right flag —
 * {@code false} whenever a match is recorded here.
 *
 * @param c                               the relevant DSpace context
 * @param entity                          the entity being checked (right endpoint candidate)
 * @param relationEntity                  the other entity of the relationship
 * @param left                            the current left/right flag
 * @param acceptableRelationshipTypes     the list that valid types are added to
 * @param rightRelationshipTypesForEntity the relationship types possible for the right entity
 * @param relationshipType                the relationship type under consideration
 * @return the left/right flag; {@code false} when this method accepted the type
 * @throws SQLException if a database error occurs
 */
private boolean handleRightLabelEqualityRelationshipTypeElement(Context c, Entity entity, Entity relationEntity,
                                                                boolean left,
                                                                List<RelationshipType> acceptableRelationshipTypes,
                                                                List<RelationshipType>
                                                                    rightRelationshipTypesForEntity,
                                                                RelationshipType relationshipType)
    throws SQLException {
    String entityTypeLabel = entityService.getType(c, entity).getLabel();
    String relationEntityTypeLabel = entityService.getType(c, relationEntity).getLabel();
    String leftTypeLabel = relationshipType.getLeftType().getLabel();
    String rightTypeLabel = relationshipType.getRightType().getLabel();
    // Endpoint types must line up: this entity on the right, the related one on the left.
    if (!StringUtils.equalsIgnoreCase(entityTypeLabel, rightTypeLabel)
        || !StringUtils.equalsIgnoreCase(relationEntityTypeLabel, leftTypeLabel)) {
        return left;
    }
    for (RelationshipType candidate : rightRelationshipTypesForEntity) {
        boolean referencesLeftType =
            StringUtils.equalsIgnoreCase(candidate.getLeftType().getLabel(), leftTypeLabel)
                || StringUtils.equalsIgnoreCase(candidate.getRightType().getLabel(), leftTypeLabel);
        if (referencesLeftType) {
            // This entity sits on the right side, so the flag becomes false.
            left = false;
            acceptableRelationshipTypes.add(relationshipType);
        }
    }
    return left;
}
/**
 * Adds the given RelationshipType to the acceptable list when it is valid for a
 * relationship in which this entity is the LEFT endpoint: the entity's type must
 * match the relationship type's left type, the related entity's type must match
 * its right type, and at least one of the entity's left-side relationship types
 * must reference that right type. Returns the (possibly updated) left/right flag —
 * {@code true} whenever a match is recorded here.
 *
 * @param c                              the relevant DSpace context
 * @param entity                         the entity being checked (left endpoint candidate)
 * @param relationEntity                 the other entity of the relationship
 * @param left                           the current left/right flag
 * @param acceptableRelationshipTypes    the list that valid types are added to
 * @param leftRelationshipTypesForEntity the relationship types possible for the left entity
 * @param relationshipType               the relationship type under consideration
 * @return the left/right flag; {@code true} when this method accepted the type
 * @throws SQLException if a database error occurs
 */
private boolean handleLeftLabelEqualityRelationshipTypeElement(Context c, Entity entity, Entity relationEntity,
                                                               boolean left,
                                                               List<RelationshipType> acceptableRelationshipTypes,
                                                               List<RelationshipType>
                                                                   leftRelationshipTypesForEntity,
                                                               RelationshipType relationshipType)
    throws SQLException {
    String entityTypeLabel = entityService.getType(c, entity).getLabel();
    String relationEntityTypeLabel = entityService.getType(c, relationEntity).getLabel();
    String leftTypeLabel = relationshipType.getLeftType().getLabel();
    String rightTypeLabel = relationshipType.getRightType().getLabel();
    // Endpoint types must line up: this entity on the left, the related one on the right.
    if (!StringUtils.equalsIgnoreCase(entityTypeLabel, leftTypeLabel)
        || !StringUtils.equalsIgnoreCase(relationEntityTypeLabel, rightTypeLabel)) {
        return left;
    }
    for (RelationshipType candidate : leftRelationshipTypesForEntity) {
        boolean referencesRightType =
            StringUtils.equalsIgnoreCase(candidate.getRightType().getLabel(), rightTypeLabel)
                || StringUtils.equalsIgnoreCase(candidate.getLeftType().getLabel(), rightTypeLabel);
        if (referencesRightType) {
            // This entity sits on the left side, so the flag becomes true.
            left = true;
            acceptableRelationshipTypes.add(relationshipType);
        }
    }
    return left;
}
/**
 * Stores the relationship.type metadata on the item when an EntityType exists for
 * the first entry of {@code values}. The EntityType's id is appended to the
 * authorities list, the field is cleared, and the value is re-added with the new
 * authority before the item is updated. When no EntityType matches, the item is
 * left untouched.
 *
 * @param c           the relevant DSpace context
 * @param item        the item to which the metadata value is added
 * @param schema      the schema for the metadata value
 * @param element     the element for the metadata value
 * @param qualifier   the qualifier for the metadata value
 * @param language    the language for the metadata value
 * @param values      the values; the first entry is used to look up the EntityType
 * @param authorities the authorities; receives the found EntityType's id
 * @param confidences the confidences for the metadata value
 * @throws SQLException       if a database error occurs
 * @throws AuthorizeException if the current user may not modify the item
 */
private void handleRelationTypeMetadata(Context c, Item item, String schema, String element, String qualifier,
                                        String language, List<String> values, List<String> authorities,
                                        List<Integer> confidences)
    throws SQLException, AuthorizeException {
    EntityType entityType = entityTypeService.findByEntityType(c, values.get(0));
    if (entityType == null) {
        // Unknown entity type: do not write any metadata.
        return;
    }
    // Record the EntityType id as the authority, then replace the field's values.
    authorities.add(String.valueOf(entityType.getID()));
    itemService.clearMetadata(c, item, schema, element, qualifier, language);
    itemService.addMetadata(c, item, schema, element, qualifier, language,
                            values, authorities, confidences);
    itemService.update(c, item);
}

View File

@@ -23,7 +23,8 @@ import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
@@ -48,7 +49,7 @@ import org.dspace.core.Utils;
* @author Nathan Sarr
*/
public final class ChecksumChecker {
private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
private static final Logger LOG = LogManager.getLogger(ChecksumChecker.class);
private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();

View File

@@ -32,8 +32,8 @@ import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.mail.MessagingException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -42,7 +42,7 @@ import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
@@ -98,7 +98,7 @@ public class ItemExportServiceImpl implements ItemExportService {
/**
* log4j logger
*/
private Logger log = Logger.getLogger(ItemExportServiceImpl.class);
private Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
protected ItemExportServiceImpl() {
@@ -214,7 +214,7 @@ public class ItemExportServiceImpl implements ItemExportService {
protected void writeMetadata(Context c, String schema, Item i,
File destDir, boolean migrate) throws Exception {
String filename;
if (schema.equals(MetadataSchema.DC_SCHEMA)) {
if (schema.equals(MetadataSchemaEnum.DC.getName())) {
filename = "dublin_core.xml";
} else {
filename = "metadata_" + schema + ".xml";
@@ -271,9 +271,8 @@ public class ItemExportServiceImpl implements ItemExportService {
("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
("date".equals(metadataField.getElement()) && "available".equals(qualifier)) ||
("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) &&
(dcv.getValue() != null && dcv.getValue().startsWith("http://hdl.handle.net/" +
handleService
.getPrefix() + "/"))) ||
(dcv.getValue() != null && dcv.getValue().startsWith(
handleService.getCanonicalPrefix() + handleService.getPrefix() + "/"))) ||
("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "mimetype".equals(qualifier))))) {
@@ -547,7 +546,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
size += bitstream.getSize();
size += bitstream.getSizeBytes();
}
}
items.add(item.getID());
@@ -574,7 +573,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
size += bitstream.getSize();
size += bitstream.getSizeBytes();
}
}
items.add(item.getID());
@@ -593,7 +592,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
size += bitstream.getSize();
size += bitstream.getSizeBytes();
}
}
ArrayList<UUID> items = new ArrayList<>();

View File

@@ -52,13 +52,13 @@ import gr.ekt.bte.core.TransformationSpec;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bteio.generators.DSpaceOutputGenerator;
import gr.ekt.bteio.loaders.OAIPMHDataLoader;
import org.apache.commons.collections.ComparatorUtils;
import org.apache.commons.collections4.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.app.util.LocalSchemaFilenameFilter;
@@ -74,6 +74,7 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
@@ -124,7 +125,7 @@ import org.xml.sax.SAXException;
* allow the registration of files (bitstreams) into DSpace.
*/
public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
private final Logger log = Logger.getLogger(ItemImportServiceImpl.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class);
@Autowired(required = true)
protected AuthorizeService authorizeService;
@@ -677,7 +678,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem(
"schema");
if (schemaAttr == null) {
schema = MetadataSchema.DC_SCHEMA;
schema = MetadataSchemaEnum.DC.getName();
} else {
schema = schemaAttr.getNodeValue();
}

View File

@@ -30,7 +30,7 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
@@ -47,7 +47,7 @@ import org.w3c.dom.Document;
* Encapsulates the Item in the context of the DSpace Archive Format
*/
public class ItemArchive {
private static final Logger log = Logger.getLogger(ItemArchive.class);
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemArchive.class);
public static final String DUBLIN_CORE_XML = "dublin_core.xml";

View File

@@ -31,11 +31,12 @@ import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* Provides some batch editing capabilities for items in DSpace:
@@ -78,6 +79,7 @@ public class ItemUpdate {
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
static {
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
@@ -330,10 +332,7 @@ public class ItemUpdate {
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0) {
HANDLE_PREFIX = "http://hdl.handle.net/";
}
HANDLE_PREFIX = handleService.getCanonicalPrefix();
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);

View File

@@ -28,12 +28,13 @@ import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -189,7 +190,7 @@ public class MetadataUtilities {
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchema.DC_SCHEMA;
schema = MetadataSchemaEnum.DC.getName();
} else {
schema = schemaAttr.getNodeValue();
}

View File

@@ -11,7 +11,7 @@ import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
@@ -36,7 +36,7 @@ import org.dspace.content.Item;
*/
public class ExcelFilter extends MediaFilter {
private static Logger log = Logger.getLogger(ExcelFilter.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";

View File

@@ -143,7 +143,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath(), true);
Info imageInfo = new Info(f.getAbsolutePath() + s, true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);

View File

@@ -22,7 +22,7 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.content.Collection;

View File

@@ -220,7 +220,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
} catch (Exception e) {
String handle = myItem.getHandle();
List<Bundle> bundles = myBitstream.getBundles();
long size = myBitstream.getSize();
long size = myBitstream.getSizeBytes();
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
int assetstore = myBitstream.getStoreNumber();
@@ -310,12 +310,11 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// get bitstream filename, calculate destination filename
String newName = formatFilter.getFilteredName(source.getName());
Bitstream existingBitstream = null; // is there an existing rendition?
Bundle targetBundle = null; // bundle we're modifying
// check if destination bitstream exists
Bundle existingBundle = null;
Bitstream existingBitstream = null;
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
// check if destination bitstream exists
if (bundles.size() > 0) {
// only finds the last match (FIXME?)
for (Bundle bundle : bundles) {
@@ -323,7 +322,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().trim().equals(newName.trim())) {
targetBundle = bundle;
existingBundle = bundle;
existingBitstream = bitstream;
}
}
@@ -345,40 +344,41 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ")");
}
InputStream destStream;
try {
System.out.println("File: " + newName);
destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
// start filtering of the bitstream, using try with resource to close all InputStreams properly
try (
// get the source stream
InputStream srcStream = bitstreamService.retrieve(context, source);
// filter the source stream to produce the destination stream
// this is the hard work, check for OutOfMemoryErrors at the end of the try clause.
InputStream destStream = formatFilter.getDestinationStream(item, srcStream, isVerbose);
) {
if (destStream == null) {
if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
}
return false;
}
} catch (OutOfMemoryError oome) {
System.out.println("!!! OutOfMemoryError !!!");
return false;
}
// create new bundle if needed
Bundle targetBundle; // bundle we're modifying
if (bundles.size() < 1) {
// create new bundle if needed
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
// take the first match
// take the first match as we already looked out for the correct bundle name
targetBundle = bundles.get(0);
}
// create bitstream to store the filter result
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// Now set the format and name of the bitstream
// set the name, source and description of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Find the proper format
// Set the format of the bitstream
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
@@ -398,10 +398,17 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
authorizeService.inheritPolicies(context, source, b);
}
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
} catch (OutOfMemoryError oome) {
System.out.println("!!! OutOfMemoryError !!!");
}
// fixme - set date?
// we are overwriting, so remove old bitstream
if (existingBitstream != null) {
bundleService.removeBitstream(context, targetBundle, existingBitstream);
bundleService.removeBitstream(context, existingBundle, existingBitstream);
}
if (!isQuiet) {
@@ -409,9 +416,6 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
}
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
return true;
}

View File

@@ -16,7 +16,7 @@ import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.text.PDFTextStripper;
import org.dspace.content.Item;
@@ -30,7 +30,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class PDFFilter extends MediaFilter {
private static Logger log = Logger.getLogger(PDFFilter.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename) {

View File

@@ -10,7 +10,7 @@ package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
@@ -23,7 +23,7 @@ import org.dspace.content.Item;
*/
public class PowerPointFilter extends MediaFilter {
private static Logger log = Logger.getLogger(PowerPointFilter.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class);
@Override
public String getFilteredName(String oldFilename) {

View File

@@ -1,93 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.textmining.extraction.TextExtractor;
import org.textmining.extraction.word.WordTextExtractorFactory;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist.
*
*/
public class WordFilter extends MediaFilter {
private static Logger log = Logger.getLogger(WordFilter.class);
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "TEXT";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "Text";
}
/**
* @return String description
*/
@Override
public String getDescription() {
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
// get input stream from bitstream
// pass to filter, get string back
try {
WordTextExtractorFactory factory = new WordTextExtractorFactory();
TextExtractor e = factory.textExtractor(source);
String extractedText = e.getText();
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
System.out.println(extractedText);
}
// generate an input stream with the extracted text
byte[] textBytes = extractedText.getBytes();
ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);
return bais; // will this work? or will the byte array be out of scope?
} catch (IOException ioe) {
System.out.println("Invalid Word Format");
log.error("Error detected - Word File format not recognized: "
+ ioe.getMessage(), ioe);
throw ioe;
}
}
}

View File

@@ -9,8 +9,8 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
@@ -31,7 +31,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
private Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
private Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemHelpdeskStrategy.class);
@Autowired(required = true)
protected EPersonService ePersonService;

View File

@@ -10,7 +10,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;

View File

@@ -10,7 +10,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.Date;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.content.Bitstream;
@@ -28,7 +28,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class RequestItemServiceImpl implements RequestItemService {
private final Logger log = Logger.getLogger(RequestItemServiceImpl.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemServiceImpl.class);
@Autowired(required = true)
protected RequestItemDAO requestItemDAO;

View File

@@ -8,13 +8,15 @@
package org.dspace.app.requestitem.dao.impl;
import java.sql.SQLException;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.RequestItem_;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.hibernate.Criteria;
import org.hibernate.criterion.Restrictions;
/**
* Hibernate implementation of the Database Access Object interface class for the RequestItem object.
@@ -30,9 +32,12 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
@Override
public RequestItem findByToken(Context context, String token) throws SQLException {
Criteria criteria = createCriteria(context, RequestItem.class);
criteria.add(Restrictions.eq("token", token));
return uniqueResult(criteria);
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RequestItem.class);
Root<RequestItem> requestItemRoot = criteriaQuery.from(RequestItem.class);
criteriaQuery.select(requestItemRoot);
criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
return uniqueResult(context, criteriaQuery, false, RequestItem.class, -1, -1);
}

View File

@@ -15,8 +15,8 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.sfx.service.SFXFileReaderService;
import org.dspace.content.DCPersonName;
import org.dspace.content.Item;
@@ -58,7 +58,7 @@ public class SFXFileReaderServiceImpl implements SFXFileReaderService {
/**
* log4j logger
*/
private final Logger log = Logger.getLogger(SFXFileReaderServiceImpl.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(SFXFileReaderServiceImpl.class);
protected SFXFileReaderServiceImpl() {
}

View File

@@ -13,7 +13,7 @@ import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

View File

@@ -7,7 +7,7 @@
*/
package org.dspace.app.sherpa;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
@@ -16,7 +16,7 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.core.ConfigurationManager;
public class SHERPAService {
@@ -29,7 +29,7 @@ public class SHERPAService {
/**
* log4j category
*/
private static final Logger log = Logger.getLogger(SHERPAService.class);
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAService.class);
public SHERPAService() {
HttpClientBuilder builder = HttpClientBuilder.create();

View File

@@ -11,8 +11,8 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.sherpa.SHERPAResponse;
import org.dspace.app.sherpa.SHERPAService;
import org.dspace.content.Item;
@@ -27,7 +27,7 @@ public class SHERPASubmitService {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(SHERPASubmitService.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPASubmitService.class);
public void setConfiguration(SHERPASubmitConfigurationService configuration) {
this.configuration = configuration;

View File

@@ -27,9 +27,9 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
@@ -52,7 +52,7 @@ public class GenerateSitemaps {
/**
* Logger
*/
private static Logger log = Logger.getLogger(GenerateSitemaps.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class);
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private static final CollectionService collectionService =

View File

@@ -28,7 +28,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -763,9 +763,10 @@ public class ReportGenerator {
// build the reference
// FIXME: here we have blurred the line between content and presentation
// and it should probably be un-blurred
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchemaEnum.DC.getName(),
"title", null, Item.ANY);
List<MetadataValue> author = itemService
.getMetadata(item, MetadataSchema.DC_SCHEMA, "contributor", "author", Item.ANY);
.getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", Item.ANY);
StringBuffer authors = new StringBuffer();
if (author.size() > 0) {

View File

@@ -21,7 +21,7 @@ import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.dspace.core.ConfigurationManager;
/**

View File

@@ -13,8 +13,8 @@ import java.util.Map;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.MetadataSchema;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.core.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -63,6 +63,12 @@ public class DCInput {
*/
private String label = null;
/**
* a style instruction to apply to the input. The exact way to use the style value is UI depending that receive the
* value from the REST API as is
*/
private String style = null;
/**
* the input type
*/
@@ -154,7 +160,7 @@ public class DCInput {
// Default the schema to dublin core
dcSchema = fieldMap.get("dc-schema");
if (dcSchema == null) {
dcSchema = MetadataSchema.DC_SCHEMA;
dcSchema = MetadataSchemaEnum.DC.getName();
}
//check if the input have a language tag
@@ -199,7 +205,7 @@ public class DCInput {
typeBind.add(type.trim());
}
}
style = fieldMap.get("style");
}
/**
@@ -262,7 +268,7 @@ public class DCInput {
}
/**
* Get the DC element for this form row.
* Get the DC element for this form field.
*
* @return the DC element
*/
@@ -271,7 +277,7 @@ public class DCInput {
}
/**
* Get the DC namespace prefix for this form row.
* Get the DC namespace prefix for this form field.
*
* @return the DC namespace prefix
*/
@@ -290,7 +296,7 @@ public class DCInput {
}
/**
* Is there a required string for this form row?
* Is there a required string for this form field?
*
* @return true if a required string is set
*/
@@ -299,7 +305,7 @@ public class DCInput {
}
/**
* Get the DC qualifier for this form row.
* Get the DC qualifier for this form field.
*
* @return the DC qualifier
*/
@@ -308,7 +314,7 @@ public class DCInput {
}
/**
* Get the language for this form row.
* Get the language for this form field.
*
* @return the language state
*/
@@ -317,7 +323,7 @@ public class DCInput {
}
/**
* Get the hint for this form row, formatted for an HTML table
* Get the hint for this form field
*
* @return the hints
*/
@@ -326,7 +332,7 @@ public class DCInput {
}
/**
* Get the label for this form row.
* Get the label for this form field.
*
* @return the label
*/
@@ -334,6 +340,15 @@ public class DCInput {
return label;
}
/**
* Get the style for this form field
*
* @return the style
*/
public String getStyle() {
return style;
}
/**
* Get the name of the pairs type
*

View File

@@ -25,25 +25,26 @@ public class DCInputSet {
/**
* the inputs ordered by row position
*/
private DCInput[] inputs = null;
private DCInput[][] inputs = null;
/**
* constructor
*
* @param formName form name
* @param headings
* @param mandatoryFlags
* @param fields fields
* @param rows the rows
* @param listMap map
*/
public DCInputSet(String formName,
List<Map<String, String>> fields, Map<String, List<String>> listMap) {
public DCInputSet(String formName, List<List<Map<String, String>>> rows, Map<String, List<String>> listMap) {
this.formName = formName;
this.inputs = new DCInput[fields.size()];
this.inputs = new DCInput[rows.size()][];
for (int i = 0; i < inputs.length; i++) {
Map<String, String> field = fields.get(i);
inputs[i] = new DCInput(field, listMap);
List<Map<String, String>> fields = rows.get(i);
inputs[i] = new DCInput[fields.size()];
for (int j = 0; j < inputs[i].length; j++) {
Map<String, String> field = rows.get(i).get(j);
inputs[i][j] = new DCInput(field, listMap);
}
}
}
@@ -71,7 +72,7 @@ public class DCInputSet {
* @return an array containing the fields
*/
public DCInput[] getFields() {
public DCInput[][] getFields() {
return inputs;
}
@@ -104,12 +105,14 @@ public class DCInputSet {
*/
public boolean isFieldPresent(String fieldName) {
for (int i = 0; i < inputs.length; i++) {
DCInput field = inputs[i];
for (int j = 0; j < inputs[i].length; j++) {
DCInput field = inputs[i][j];
String fullName = field.getFieldName();
if (fullName.equals(fieldName)) {
return true;
}
}
}
return false;
}
@@ -127,7 +130,8 @@ public class DCInputSet {
documentType = "";
}
for (int i = 0; i < inputs.length; i++) {
DCInput field = inputs[i];
for (int j = 0; j < inputs[i].length; j++) {
DCInput field = inputs[i][j];
String fullName = field.getFieldName();
if (fullName.equals(fieldName)) {
if (field.isAllowedFor(documentType)) {
@@ -135,6 +139,7 @@ public class DCInputSet {
}
}
}
}
return false;
}

View File

@@ -21,7 +21,7 @@ import javax.xml.parsers.FactoryConfigurationError;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Collection;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.core.Utils;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
@@ -74,7 +74,7 @@ public class DCInputsReader {
* Reference to the forms definitions map, computed from the forms
* definition file
*/
private Map<String, List<Map<String, String>>> formDefns = null;
private Map<String, List<List<Map<String, String>>>> formDefns = null;
/**
* Reference to the value-pairs map, computed from the forms definition file
@@ -115,7 +115,7 @@ public class DCInputsReader {
private void buildInputs(String fileName)
throws DCInputsReaderException {
formDefns = new HashMap<String, List<Map<String, String>>>();
formDefns = new HashMap<String, List<List<Map<String, String>>>>();
valuePairs = new HashMap<String, List<String>>();
String uri = "file:" + new File(fileName).getAbsolutePath();
@@ -212,7 +212,7 @@ public class DCInputsReader {
return lastInputSet;
}
// cache miss - construct new DCInputSet
List<Map<String, String>> pages = formDefns.get(formName);
List<List<Map<String, String>>> pages = formDefns.get(formName);
if (pages == null) {
throw new DCInputsReaderException("Missing the " + formName + " form");
}
@@ -292,8 +292,8 @@ public class DCInputsReader {
/**
* Process the form-definitions section of the XML file. Each element is
* formed thusly: <form name="formname">...pages...</form> Each pages
* subsection is formed: <page number="#"> ...fields... </page> Each field
* formed thusly: <form name="formname">...row...</form> Each row
* subsection is formed: <row> ...fields... </row> Each field
* is formed from: dc-element, dc-qualifier, label, hint, input-type name,
* required text, and repeatable flag.
*/
@@ -311,10 +311,40 @@ public class DCInputsReader {
if (formName == null) {
throw new SAXException("form element has no name attribute");
}
List<Map<String, String>> fields = new ArrayList<Map<String, String>>(); // the form contains fields
formDefns.put(formName, fields);
List<List<Map<String, String>>> rows = new ArrayList<List<Map<String, String>>>(); // the form
// contains rows of fields
formDefns.put(formName, rows);
NodeList pl = nd.getChildNodes();
int lenpg = pl.getLength();
for (int j = 0; j < lenpg; j++) {
Node npg = pl.item(j);
if (npg.getNodeName().equals("row")) {
List<Map<String, String>> fields = new ArrayList<Map<String, String>>(); // the fields in the
// row
// process each row definition
processRow(formName, j, npg, fields);
rows.add(fields);
}
}
// sanity check number of fields
if (rows.size() < 1) {
throw new DCInputsReaderException("Form " + formName + " has no rows");
}
}
}
if (numForms == 0) {
throw new DCInputsReaderException("No form definition found");
}
}
/**
* Process parts of a row
*/
private void processRow(String formName, int rowIdx, Node n, List<Map<String, String>> fields)
throws SAXException, DCInputsReaderException {
NodeList pl = n.getChildNodes();
int lenpg = pl.getLength();
for (int j = 0; j < lenpg; j++) {
Node npg = pl.item(j);
@@ -323,6 +353,20 @@ public class DCInputsReader {
Map<String, String> field = new HashMap<String, String>();
processField(formName, npg, field);
fields.add(field);
String key = field.get(PAIR_TYPE_NAME);
if (StringUtils
.isNotBlank(key)) {
String schema = field.get("dc-schema");
String element = field.get("dc-element");
String qualifier = field
.get("dc-qualifier");
String metadataField = schema + "."
+ element;
if (StringUtils.isNotBlank(qualifier)) {
metadataField += "." + qualifier;
}
}
// we omit the duplicate validation, allowing multiple
// fields definition for
// the same metadata and different visibility/type-bind
@@ -330,15 +374,11 @@ public class DCInputsReader {
}
// sanity check number of fields
if (fields.size() < 1) {
throw new DCInputsReaderException("Form " + formName + " has no fields");
}
}
}
if (numForms == 0) {
throw new DCInputsReaderException("No form definition found");
throw new DCInputsReaderException("Form " + formName + "row " + rowIdx + " has no fields");
}
}
/**
* Process parts of a field
* At the end, make sure that input-types 'qualdrop_value' and
@@ -424,7 +464,7 @@ public class DCInputsReader {
String elem = field.get("dc-element");
String qual = field.get("dc-qualifier");
if ((schema == null) || (schema.equals(""))) {
schema = MetadataSchema.DC_SCHEMA;
schema = MetadataSchemaEnum.DC.getName();
}
String schemaTest;
@@ -434,7 +474,7 @@ public class DCInputsReader {
Map<String, String> fld = pg.get(j);
if ((fld.get("dc-schema") == null) ||
((fld.get("dc-schema")).equals(""))) {
schemaTest = MetadataSchema.DC_SCHEMA;
schemaTest = MetadataSchemaEnum.DC.getName();
} else {
schemaTest = fld.get("dc-schema");
}
@@ -537,7 +577,9 @@ public class DCInputsReader {
Iterator<String> ki = formDefns.keySet().iterator();
while (ki.hasNext()) {
String idName = ki.next();
List<Map<String, String>> fields = formDefns.get(idName);
List<List<Map<String, String>>> rows = formDefns.get(idName);
for (int j = 0; j < rows.size(); j++) {
List<Map<String, String>> fields = rows.get(j);
for (int i = 0; i < fields.size(); i++) {
Map<String, String> fld = fields.get(i);
// verify reference in certain input types
@@ -555,8 +597,9 @@ public class DCInputsReader {
// we omit the "required" and "visibility" validation, provided this must be checked in the
// processing class
// only when it makes sense (if the field isn't visible means that it is not applicable, therefore it
// can't be required)
// only when it makes sense (if the field isn't visible means that it is not applicable,
// therefore it can't be required)
}
}
}
}
@@ -639,4 +682,5 @@ public class DCInputsReader {
}
throw new DCInputsReaderException("No field configuration found!");
}
}

View File

@@ -16,14 +16,14 @@ import java.util.Enumeration;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
/**
* Class to initialize / cleanup resources used by DSpace when the web application
* is started or stopped.
*/
public class DSpaceContextListener implements ServletContextListener {
private static Logger log = Logger.getLogger(DSpaceContextListener.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceContextListener.class);
/**
* Initialize any resources required by the application.

View File

@@ -1,299 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import java.io.File;
import java.io.IOException;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import org.apache.commons.lang.time.DateUtils;
import org.apache.log4j.FileAppender;
import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.spi.LoggingEvent;
/**
* Special log appender for log4j. Adds the current date (ie. year-mon) to
* the end of the file name, so that rolling on to the next log is simply
* a case of starting a new one - no renaming of old logs.
*
* This is advisable if you are using Windows, and have multiple applications
* (ie. dspace, dspace-oai, dspace-sword) that all want to write to the same log file,
* as each would otherwise try to rename the old files during rollover.
*
* An example log4j.properties (one log per month, retains three months of logs)
*
* log4j.rootCategory=INFO, A1
* log4j.appender.A1=org.dspace.app.util.DailyFileAppender
* log4j.appender.A1.File=@@log.dir@@/dspace.log
* log4j.appender.A1.DatePattern=yyyy-MM
* log4j.appender.A1.MaxLogs=3
* log4j.appender.A1.layout=org.apache.log4j.PatternLayout
* log4j.appender.A1.layout.ConversionPattern=%d %-5p %c @ %m%n
*/
public class DailyFileAppender extends FileAppender {
    /**
     * The fixed date pattern to be used if one is not specified.
     */
    private static final String DATE_PATTERN = "yyyy-MM-dd";
    /**
     * The folder under which daily folders are created. This can be an absolute path
     * or relative path also.
     * e.g. JavaLogs/CPRILog or F:/LogFiles/CPRILog
     */
    private String mstrFileName;
    /**
     * Used internally and contains the name of the date derived from current system date.
     */
    private Date mstrDate = new Date(System.currentTimeMillis());
    /**
     * Holds the user specified DatePattern.
     */
    private String mstrDatePattern = DATE_PATTERN;
    // True when the configured pattern has no day ("dd"/"DD") component, so
    // rollover happens once per month instead of once per day.
    private boolean mMonthOnly = false;
    /**
     * The date formatter object used for parsing the user specified DatePattern.
     */
    private SimpleDateFormat mobjSDF;
    // When true, the local host name is appended to the log file name so that
    // multiple hosts can share one log directory without clashing.
    private boolean mWithHostName = false;
    // Maximum number of old log files to retain during cleanup; 0 disables cleanup.
    private int mMaxLogs = 0;
    /**
     * Default constructor. This is required as the appender class is dynamically
     * loaded.
     */
    public DailyFileAppender() {
        super();
    }
    /* (non-Javadoc)
     * @see org.apache.log4j.FileAppender#activateOptions()
     */
    @Override
    public void activateOptions() {
        // Compute the dated file name and prune old logs before the superclass
        // opens the output stream on that file.
        setFileName();
        cleanupOldFiles();
        super.activateOptions();
    }
    /*------------------------------------------------------------------------------
     * Getters
     *----------------------------------------------------------------------------*/
    public String getDatePattern() {
        return this.mstrDatePattern;
    }
    @Override
    public String getFile() {
        return this.mstrFileName;
    }
    public boolean getWithHost() {
        return mWithHostName;
    }
    public int getMaxLogs() {
        return mMaxLogs;
    }
    /*------------------------------------------------------------------------------
     * Setters
     *----------------------------------------------------------------------------*/
    public void setDatePattern(String pstrPattern) {
        // Validate the pattern (falls back to DATE_PATTERN when invalid) and
        // derive the rollover granularity from it: no day component => monthly.
        this.mstrDatePattern = checkPattern(pstrPattern);
        if (mstrDatePattern.contains("dd") || mstrDatePattern.contains("DD")) {
            mMonthOnly = false;
        } else {
            mMonthOnly = true;
        }
    }
    @Override
    public void setFile(String file) {
        // Trim spaces from both ends. The users probably does not want
        // trailing spaces in file names.
        String val = file.trim();
        mstrFileName = val;
    }
    public void setWithHost(boolean wh) {
        mWithHostName = wh;
    }
    public void setMaxLogs(int ml) {
        mMaxLogs = ml;
    }
    /*------------------------------------------------------------------------------
     * Methods
     *----------------------------------------------------------------------------*/
    /* (non-Javadoc)
     * @see org.apache.log4j.WriterAppender#subAppend(org.apache.log4j.spi.LoggingEvent)
     */
    @Override
    protected void subAppend(LoggingEvent pobjEvent) {
        // Before writing each event, check whether the day (or, in monthly
        // mode, the year/month) has changed since the current file was opened;
        // if so, roll over to a freshly named file.
        Date dtNow = new Date(System.currentTimeMillis());
        boolean rollover = false;
        if (mMonthOnly) {
            Calendar now = Calendar.getInstance();
            Calendar cur = Calendar.getInstance();
            now.setTime(dtNow);
            cur.setTime(mstrDate);
            rollover = !(now.get(Calendar.YEAR) == cur.get(Calendar.YEAR) && now.get(Calendar.MONTH) == cur
                .get(Calendar.MONTH));
        } else {
            rollover = !(DateUtils.isSameDay(dtNow, mstrDate));
        }
        if (rollover) {
            try {
                rollOver(dtNow);
            } catch (IOException IOEx) {
                // Rollover failure is logged but not fatal: keep appending to
                // the old file rather than losing events.
                LogLog.error("rollOver() failed!", IOEx);
            }
        }
        super.subAppend(pobjEvent);
    }
    /*------------------------------------------------------------------------------
     * Helpers
     *----------------------------------------------------------------------------*/
    /**
     * The helper function to validate the DatePattern.
     *
     * @param pstrPattern The DatePattern to be validated.
     * @return The validated date pattern or default DATE_PATTERN
     */
    private String checkPattern(String pstrPattern) {
        String strRet = null;
        SimpleDateFormat objFmt = new SimpleDateFormat(DATE_PATTERN);
        try {
            // Side effect: mobjSDF is (re)initialised with whichever pattern
            // turns out to be valid.
            this.mobjSDF = new SimpleDateFormat(pstrPattern);
            strRet = pstrPattern;
        } catch (NullPointerException NPExIgnore) {
            LogLog.error("Invalid DatePattern " + pstrPattern, NPExIgnore);
            this.mobjSDF = objFmt;
            strRet = DATE_PATTERN;
        } catch (IllegalArgumentException IlArgExIgnore) {
            LogLog.error("Invalid DatePattern " + pstrPattern, IlArgExIgnore);
            this.mobjSDF = objFmt;
            strRet = DATE_PATTERN;
        } finally {
            objFmt = null;
        }
        return strRet;
    }
    /**
     * This function is responsible for performing the actual file rollover.
     *
     * @param pstrName The name of the new folder based on current system date.
     * @throws IOException if IO error
     */
    // NOTE(review): the javadoc above actually describes rollOver() below; it
    // sits on this guard field in the original source.
    // Best-effort guard so that only one thread prunes logs at a time (not a
    // strict lock: two threads may still race past the check).
    private static boolean deletingFiles = false;
    /**
     * Delete old log files in the log directory, keeping only the newest
     * mMaxLogs files whose names contain the master file name. No-op when
     * mMaxLogs is 0 or a cleanup is already in progress.
     */
    private void cleanupOldFiles() {
        // If we need to delete log files
        if (mMaxLogs > 0 && !deletingFiles) {
            deletingFiles = true;
            // Determine the final file extension with the hostname
            String hostFileExt = null;
            try {
                hostFileExt = "." + java.net.InetAddress.getLocalHost().getHostName();
            } catch (UnknownHostException e) {
                LogLog.error("Unable to retrieve host name");
            }
            try {
                // Array to hold the logs we are going to keep
                File[] logsToKeep = new File[mMaxLogs];
                // Get a 'master' file handle, and the parent directory from it
                File logMaster = new File(mstrFileName);
                File logDir = logMaster.getParentFile();
                if (logDir.isDirectory()) {
                    // Iterate all the files in that directory
                    File[] logArr = logDir.listFiles();
                    for (File curLog : logArr) {
                        LogLog.debug("Comparing '" + curLog.getAbsolutePath() + "' to '" + mstrFileName + "'");
                        String name = curLog.getAbsolutePath();
                        // First, see if we are not using hostname, or the log file ends with this host
                        if (!mWithHostName || (hostFileExt != null && name.endsWith(hostFileExt))) {
                            // Check that the file is indeed one we want (contains the master file name)
                            if (name.contains(mstrFileName)) {
                                // Iterate through the array of logs we are keeping
                                // (selection keeps the lexicographically largest
                                // names, i.e. the most recent dated files).
                                for (int i = 0; curLog != null && i < logsToKeep.length; i++) {
                                    // Have we exhausted the 'to keep' array?
                                    if (logsToKeep[i] == null) {
                                        // Empty space, retain this log file
                                        logsToKeep[i] = curLog;
                                        curLog = null;
                                    } else if (logsToKeep[i].getName().compareTo(curLog.getName()) < 0) {
                                        // If the 'kept' file is older than the current one
                                        // Replace tested entry with current file
                                        File temp = logsToKeep[i];
                                        logsToKeep[i] = curLog;
                                        curLog = temp;
                                    }
                                }
                                // If we have a 'current' entry at this point, it's a log we don't want
                                if (curLog != null) {
                                    LogLog.debug("Deleting log " + curLog.getName());
                                    if (!curLog.delete()) {
                                        LogLog.error("Unable to delete log file");
                                    }
                                }
                            }
                        }
                    }
                }
            } catch (Exception e) {
                // Don't worry about exceptions
            } finally {
                deletingFiles = false;
            }
        }
    }
    private void rollOver(Date dtNow) throws IOException {
        // Remember the new date, recompute the dated file name, re-open the
        // appender on that file in append mode, then prune old logs.
        mstrDate = dtNow;
        setFileName();
        this.setFile(fileName, true, bufferedIO, bufferSize);
        cleanupOldFiles();
    }
    private void setFileName() {
        // Dated file name: <base>.<formatted date>[.<hostname>]
        fileName = mstrFileName + "." + mobjSDF.format(mstrDate);
        if (mWithHostName) {
            try {
                fileName += "." + java.net.InetAddress.getLocalHost().getHostName();
            } catch (UnknownHostException e) {
                LogLog.error("Unable to retrieve host name");
            }
        }
    }
}

View File

@@ -12,7 +12,7 @@ import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
@@ -24,7 +24,7 @@ import org.dspace.core.Context;
*/
public class GoogleBitstreamComparator implements Comparator<Bitstream> {
private final static Logger log = Logger.getLogger(GoogleBitstreamComparator.class);
private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleBitstreamComparator.class);
HashMap<String, Integer> priorityMap = new HashMap<>();
@@ -86,7 +86,7 @@ public class GoogleBitstreamComparator implements Comparator<Bitstream> {
if (priority1 > priority2) {
return 1;
} else if (priority1 == priority2) {
if (b1.getSize() <= b2.getSize()) {
if (b1.getSizeBytes() <= b2.getSizeBytes()) {
return 1;
} else {
return -1;

View File

@@ -26,7 +26,7 @@ import java.util.Properties;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -51,7 +51,7 @@ import org.jdom.Element;
@SuppressWarnings("deprecation")
public class GoogleMetadata {
private final static Logger log = Logger.getLogger(GoogleMetadata.class);
private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleMetadata.class);
protected static final String GOOGLE_PREFIX = "google.";

View File

@@ -121,20 +121,24 @@ public class IndexVersion {
}
// Open this index directory in Lucene
Directory indexDir = FSDirectory.open(dir);
Directory indexDir = FSDirectory.open(dir.toPath());
// Get info on the Lucene segment file(s) in index directory
SegmentInfos sis = new SegmentInfos();
SegmentInfos sis;
try {
sis.read(indexDir);
sis = SegmentInfos.readLatestCommit(indexDir);
} catch (IOException ie) {
// Wrap default IOException, providing more info about which directory cannot be read
throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath(), ie);
}
if (null == sis) {
throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath());
}
// If we have a valid Solr index dir, but it has no existing segments
// then just return an empty string. It's a valid but empty index.
if (sis != null && sis.size() == 0) {
if (sis.size() == 0) {
return "";
}

View File

@@ -0,0 +1,227 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.EntityType;
import org.dspace.content.RelationshipType;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* This script is used to initialize the database with a set of relationshiptypes that are written
* in an xml file that is given to this script.
* This XML file needs to have a proper XML structure and needs to define the variables of the RelationshipType object
*/
public class InitializeEntities {
private final static Logger log = LogManager.getLogger();
private RelationshipTypeService relationshipTypeService;
private RelationshipService relationshipService;
private EntityTypeService entityTypeService;
/**
 * Private constructor: wires up the content-layer services this script
 * needs from the DSpace service factory.
 */
private InitializeEntities() {
    ContentServiceFactory serviceFactory = ContentServiceFactory.getInstance();
    entityTypeService = serviceFactory.getEntityTypeService();
    relationshipService = serviceFactory.getRelationshipService();
    relationshipTypeService = serviceFactory.getRelationshipTypeService();
}
/**
 * The main method for this script.
 *
 * Parses the command line, prints usage when -h is given, and otherwise
 * loads relationship types from the XML file passed with -f.
 *
 * @param argv The commandline arguments given with this command
 * @throws SQLException If something goes wrong with the database
 * @throws AuthorizeException If something goes wrong with permissions
 * @throws ParseException If something goes wrong with the parsing
 */
public static void main(String[] argv) throws SQLException, AuthorizeException, ParseException {
    InitializeEntities initializeEntities = new InitializeEntities();
    CommandLineParser parser = new PosixParser();
    Options options = createCommandLineOptions();
    CommandLine line = parser.parse(options, argv);
    // Check -h before requiring -f: previously "-h" alone hit the
    // "No file location was entered" error path and exited with status 1
    // instead of printing the help screen.
    checkHelpEntered(options, line);
    String fileLocation = getFileLocationFromCommandLine(line);
    initializeEntities.run(fileLocation);
}
/**
 * Print the usage screen and terminate the JVM when the help flag (-h)
 * was supplied on the command line; otherwise do nothing.
 *
 * @param options the known command line options, used to render usage
 * @param line    the parsed command line to inspect
 */
private static void checkHelpEntered(Options options, CommandLine line) {
    if (!line.hasOption("h")) {
        return;
    }
    // Program name string kept verbatim from the original ("Intialize" sic).
    new HelpFormatter().printHelp("Intialize Entities", options);
    System.exit(0);
}
/**
 * Extract the XML file location from the -f option. When the option is
 * missing or empty, report the problem on stdout and in the log, then
 * terminate the JVM with exit status 1.
 *
 * @param line the parsed command line
 * @return the value of the -f option (never empty)
 */
private static String getFileLocationFromCommandLine(CommandLine line) {
    final String fileLocation = line.getOptionValue("f");
    if (StringUtils.isNotEmpty(fileLocation)) {
        return fileLocation;
    }
    System.out.println("No file location was entered");
    log.info("No file location was entered");
    System.exit(1);
    return null; // unreachable: System.exit(1) does not return
}
/**
 * Build the command line options understood by this script.
 *
 * @return the options: -f/--file (required in practice) and -h/--help
 */
protected static Options createCommandLineOptions() {
    Options options = new Options();
    options.addOption("f", "file", true, "the location for the file containing the xml data");
    // Register -h so checkHelpEntered() can actually match it. Without this,
    // commons-cli rejects "-h" as an unrecognised option and the help screen
    // is unreachable.
    options.addOption("h", "help", false, "print this help message");
    return options;
}
/**
 * Open a fresh DSpace context with authorisation switched off, load the
 * relationship types from the given XML file, and commit the context.
 *
 * @param fileLocation path of the relationship-type XML file
 * @throws SQLException       on database errors
 * @throws AuthorizeException on permission errors
 */
private void run(String fileLocation) throws SQLException, AuthorizeException {
    Context context = new Context();
    // The script runs outside any authenticated session, so authorisation
    // must be disabled for the type-creation calls below.
    context.turnOffAuthorisationSystem();
    this.parseXMLToRelations(context, fileLocation);
    // NOTE(review): if parseXMLToRelations throws, the context is neither
    // completed nor aborted here — confirm whether cleanup is needed on failure.
    context.complete();
}
/**
 * Parse the relationship-type definition file and persist every &lt;type&gt;
 * element it contains via populateRelationshipType (which also creates the
 * referenced EntityTypes on demand).
 *
 * Expected structure per &lt;type&gt;: leftType, rightType, leftLabel,
 * rightLabel, plus optional leftCardinality/rightCardinality elements whose
 * min/max children bound the relationship.
 *
 * Parse and database errors are logged, not rethrown.
 *
 * @param context      the DSpace context (authorisation already disabled by run())
 * @param fileLocation path of the XML file to read
 * @throws AuthorizeException if creating a type is not permitted
 */
private void parseXMLToRelations(Context context, String fileLocation) throws AuthorizeException {
    try {
        File fXmlFile = new File(fileLocation);
        // NOTE(review): DTD/external-entity processing is left enabled here.
        // The file is admin-supplied, but consider disabling external
        // entities (XXE hardening) if it can ever come from elsewhere.
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(fXmlFile);
        doc.getDocumentElement().normalize();
        NodeList nList = doc.getElementsByTagName("type");
        for (int i = 0; i < nList.getLength(); i++) {
            Node nNode = nList.item(i);
            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                Element eElement = (Element) nNode;
                // NOTE(review): these four elements are assumed present; a
                // missing tag would raise a NullPointerException on item(0).
                String leftType = eElement.getElementsByTagName("leftType").item(0).getTextContent();
                String rightType = eElement.getElementsByTagName("rightType").item(0).getTextContent();
                String leftLabel = eElement.getElementsByTagName("leftLabel").item(0).getTextContent();
                String rightLabel = eElement.getElementsByTagName("rightLabel").item(0).getTextContent();
                NodeList leftCardinalityList = eElement.getElementsByTagName("leftCardinality");
                NodeList rightCardinalityList = eElement.getElementsByTagName("rightCardinality");
                // Cardinalities are optional; empty string means "not specified"
                // and is mapped to null by populateRelationshipType.
                String leftCardinalityMin = "";
                String leftCardinalityMax = "";
                String rightCardinalityMin = "";
                String rightCardinalityMax = "";
                for (int j = 0; j < leftCardinalityList.getLength(); j++) {
                    Node node = leftCardinalityList.item(j);
                    leftCardinalityMin = getString(leftCardinalityMin, (Element) node, "min");
                    leftCardinalityMax = getString(leftCardinalityMax, (Element) node, "max");
                }
                for (int j = 0; j < rightCardinalityList.getLength(); j++) {
                    Node node = rightCardinalityList.item(j);
                    rightCardinalityMin = getString(rightCardinalityMin, (Element) node, "min");
                    rightCardinalityMax = getString(rightCardinalityMax, (Element) node, "max");
                }
                populateRelationshipType(context, leftType, rightType, leftLabel, rightLabel,
                                         leftCardinalityMin, leftCardinalityMax,
                                         rightCardinalityMin, rightCardinalityMax);
            }
        }
    } catch (ParserConfigurationException | SAXException | IOException | SQLException e) {
        log.error("An error occurred while parsing the XML file to relations", e);
    }
}
/**
 * Read the text content of the first child element named {@code minOrMax}
 * ("min" or "max") of the given cardinality element, or return the supplied
 * default when no such child exists.
 *
 * The parameter was previously named {@code leftCardinalityMin} and
 * reassigned in place, although the helper is used for both the left and
 * right cardinality; it is renamed here and no longer mutated.
 *
 * @param defaultValue value returned when the element is absent
 * @param node         the cardinality element to inspect
 * @param minOrMax     the tag name to look up ("min" or "max")
 * @return the element's text content, or {@code defaultValue}
 */
private String getString(String defaultValue, Element node, String minOrMax) {
    NodeList matches = node.getElementsByTagName(minOrMax);
    if (matches.getLength() > 0) {
        return matches.item(0).getTextContent();
    }
    return defaultValue;
}
/**
 * Ensure that the two {@link EntityType}s and the {@link RelationshipType}
 * described by a single &lt;type&gt; configuration element exist in the database.
 * Missing entity types are created; an existing relationship type has its
 * cardinalities updated in place, otherwise a new one is created.
 *
 * @param context              the DSpace context (must allow create/update)
 * @param leftType             entity type name on the left side of the relationship
 * @param rightType            entity type name on the right side of the relationship
 * @param leftLabel            label of the relationship as seen from the left side
 * @param rightLabel           label of the relationship as seen from the right side
 * @param leftCardinalityMin   minimum left cardinality, may be blank for "unbounded"
 * @param leftCardinalityMax   maximum left cardinality, may be blank for "unbounded"
 * @param rightCardinalityMin  minimum right cardinality, may be blank for "unbounded"
 * @param rightCardinalityMax  maximum right cardinality, may be blank for "unbounded"
 * @throws SQLException       on database access failure
 * @throws AuthorizeException if the context may not create or update these objects
 * @throws NumberFormatException if a non-blank cardinality is not a valid integer
 */
private void populateRelationshipType(Context context, String leftType, String rightType, String leftLabel,
                                      String rightLabel, String leftCardinalityMin, String leftCardinalityMax,
                                      String rightCardinalityMin, String rightCardinalityMax)
    throws SQLException, AuthorizeException {
    EntityType leftEntityType = getOrCreateEntityType(context, leftType);
    EntityType rightEntityType = getOrCreateEntityType(context, rightType);

    Integer leftMin = parseCardinality(leftCardinalityMin);
    Integer leftMax = parseCardinality(leftCardinalityMax);
    Integer rightMin = parseCardinality(rightCardinalityMin);
    Integer rightMax = parseCardinality(rightCardinalityMax);

    RelationshipType relationshipType = relationshipTypeService
        .findbyTypesAndLabels(context, leftEntityType, rightEntityType, leftLabel, rightLabel);
    if (relationshipType == null) {
        relationshipTypeService.create(context, leftEntityType, rightEntityType, leftLabel, rightLabel,
                                       leftMin, leftMax, rightMin, rightMax);
    } else {
        // Relationship type already configured: refresh its cardinalities only.
        relationshipType.setLeftMinCardinality(leftMin);
        relationshipType.setLeftMaxCardinality(leftMax);
        relationshipType.setRightMinCardinality(rightMin);
        relationshipType.setRightMaxCardinality(rightMax);
        relationshipTypeService.update(context, relationshipType);
    }
}

/**
 * Look up an {@link EntityType} by name, creating it if it does not exist yet.
 *
 * @param context    the DSpace context
 * @param entityType the entity type name (e.g. "Publication")
 * @return the existing or newly created entity type
 * @throws SQLException       on database access failure
 * @throws AuthorizeException if the context may not create the entity type
 */
private EntityType getOrCreateEntityType(Context context, String entityType)
    throws SQLException, AuthorizeException {
    EntityType type = entityTypeService.findByEntityType(context, entityType);
    if (type == null) {
        type = entityTypeService.create(context, entityType);
    }
    return type;
}

/**
 * Parse a cardinality value read from the configuration XML.
 *
 * @param cardinality the raw text content; blank or null means "unbounded"
 * @return the parsed value, or {@code null} when the input is blank
 * @throws NumberFormatException if a non-blank value is not a valid integer
 */
private Integer parseCardinality(String cardinality) {
    if (StringUtils.isNotBlank(cardinality)) {
        // Trim: XML text content frequently carries surrounding whitespace,
        // which Integer.valueOf would otherwise reject.
        return Integer.valueOf(cardinality.trim());
    }
    return null;
}
}

View File

@@ -14,7 +14,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.service.MetadataExposureService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
@@ -58,7 +58,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* @version $Revision: 3734 $
*/
public class MetadataExposureServiceImpl implements MetadataExposureService {
protected Logger log = Logger.getLogger(MetadataExposureServiceImpl.class);
protected Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataExposureServiceImpl.class);
protected Map<String, Set<String>> hiddenElementSets = null;
protected Map<String, Map<String, Set<String>>> hiddenElementMaps = null;

View File

@@ -20,11 +20,13 @@ import com.sun.syndication.feed.module.opensearch.OpenSearchModule;
import com.sun.syndication.feed.module.opensearch.entity.OSQuery;
import com.sun.syndication.feed.module.opensearch.impl.OpenSearchModuleImpl;
import com.sun.syndication.io.FeedException;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.service.OpenSearchService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -33,7 +35,6 @@ import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.output.DOMOutputter;
import org.jdom.output.XMLOutputter;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.Document;
@@ -53,20 +54,14 @@ import org.w3c.dom.Document;
*
* @author Richard Rodgers
*/
public class OpenSearchServiceImpl implements OpenSearchService, InitializingBean {
private static final Logger log = Logger.getLogger(OpenSearchServiceImpl.class);
public class OpenSearchServiceImpl implements OpenSearchService {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenSearchServiceImpl.class);
// are open search queries enabled?
protected boolean enabled = false;
// supported results formats
protected List<String> formats = null;
// Namespaces used
protected final String osNs = "http://a9.com/-/spec/opensearch/1.1/";
// base search UI URL
protected String uiUrl = null;
// base search service URL
protected String svcUrl = null;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired(required = true)
protected HandleService handleService;
@@ -75,25 +70,35 @@ public class OpenSearchServiceImpl implements OpenSearchService, InitializingBea
}
@Override
public void afterPropertiesSet() throws Exception {
ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
enabled = config.getBooleanProperty("websvc.opensearch.enable");
svcUrl = config.getProperty("dspace.url") + "/" +
config.getProperty("websvc.opensearch.svccontext");
uiUrl = config.getProperty("dspace.url") + "/" +
config.getProperty("websvc.opensearch.uicontext");
// read rest of config info if enabled
formats = new ArrayList<String>();
if (enabled) {
String[] fmts = config.getArrayProperty("websvc.opensearch.formats");
public List<String> getFormats() {
List<String> formats = new ArrayList<>();
// read formats only if enabled
if (isEnabled()) {
String[] fmts = configurationService.getArrayProperty("websvc.opensearch.formats");
formats = Arrays.asList(fmts);
}
return formats;
}
@Override
public List<String> getFormats() {
return formats;
public boolean isEnabled() {
return configurationService.getBooleanProperty("websvc.opensearch.enable");
}
/**
* Get base search service URL (websvc.opensearch.svccontext)
*/
protected String getBaseSearchServiceURL() {
return configurationService.getProperty("dspace.url") + "/" +
configurationService.getProperty("websvc.opensearch.svccontext");
}
/**
* Get base search UI URL (websvc.opensearch.uicontext)
*/
protected String getBaseSearchUIURL() {
return configurationService.getProperty("dspace.url") + "/" +
configurationService.getProperty("websvc.opensearch.uicontext");
}
@Override
@@ -115,7 +120,7 @@ public class OpenSearchServiceImpl implements OpenSearchService, InitializingBea
@Override
public String getResultsString(Context context, String format, String query, int totalResults, int start,
int pageSize,
DSpaceObject scope, List<DSpaceObject> results,
IndexableObject scope, List<IndexableObject> results,
Map<String, String> labels) throws IOException {
try {
return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
@@ -129,7 +134,7 @@ public class OpenSearchServiceImpl implements OpenSearchService, InitializingBea
@Override
public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
int pageSize,
DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels)
IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
throws IOException {
try {
return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
@@ -141,8 +146,8 @@ public class OpenSearchServiceImpl implements OpenSearchService, InitializingBea
}
protected SyndicationFeed getResults(Context context, String format, String query, int totalResults, int start,
int pageSize,
DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels) {
int pageSize, IndexableObject scope,
List<IndexableObject> results, Map<String, String> labels) {
// Encode results in requested format
if ("rss".equals(format)) {
format = "rss_2.0";
@@ -221,13 +226,13 @@ public class OpenSearchServiceImpl implements OpenSearchService, InitializingBea
root.addContent(fav);
}
// service URLs
for (String format : formats) {
for (String format : getFormats()) {
Element url = new Element("Url", ns).setAttribute("type", getContentType(format));
StringBuilder template = new StringBuilder();
if ("html".equals(format)) {
template.append(uiUrl);
template.append(getBaseSearchUIURL());
} else {
template.append(svcUrl);
template.append(getBaseSearchServiceURL());
}
template.append("?query={searchTerms}");
if (!"html".equals(format)) {

View File

@@ -11,7 +11,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
@@ -28,7 +28,7 @@ import org.springframework.util.StopWatch;
* Invocation: dsrun org.dspace.app.util.OptimizeSelectCollection
*/
public class OptimizeSelectCollection {
private static final Logger log = Logger.getLogger(OptimizeSelectCollection.class);
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OptimizeSelectCollection.class);
private static Context context;
private static ArrayList<EPerson> brokenPeople;

View File

@@ -12,7 +12,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
/**
* Class representing a single Item Submission config definition, organized into
@@ -44,7 +44,7 @@ public class SubmissionConfig implements Serializable {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(SubmissionConfig.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfig.class);
/**
* Constructs a new Submission Configuration object, based on the XML

View File

@@ -19,7 +19,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
@@ -71,7 +71,7 @@ public class SubmissionConfigReader {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(SubmissionConfigReader.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfigReader.class);
/**
* The fully qualified pathname of the directory containing the Item Submission Configuration file

View File

@@ -34,9 +34,9 @@ import com.sun.syndication.feed.synd.SyndPerson;
import com.sun.syndication.feed.synd.SyndPersonImpl;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.SyndFeedOutput;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
@@ -52,6 +52,7 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -69,7 +70,7 @@ import org.w3c.dom.Document;
* @author Larry Stone
*/
public class SyndicationFeed {
protected final Logger log = Logger.getLogger(SyndicationFeed.class);
protected final Logger log = org.apache.logging.log4j.LogManager.getLogger(SyndicationFeed.class);
/**
@@ -179,12 +180,12 @@ public class SyndicationFeed {
*
* @param request request
* @param context context
* @param dso DSpaceObject
* @param dso the scope
* @param items array of objects
* @param labels label map
*/
public void populate(HttpServletRequest request, Context context, DSpaceObject dso,
List<? extends DSpaceObject> items, Map<String, String> labels) {
public void populate(HttpServletRequest request, Context context, IndexableObject dso,
List<IndexableObject> items, Map<String, String> labels) {
String logoURL = null;
String objectURL = null;
String defaultTitle = null;
@@ -208,6 +209,7 @@ public class SyndicationFeed {
if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) {
podcastFeed = true;
}
objectURL = resolveURL(request, col);
} else if (dso.getType() == Constants.COMMUNITY) {
Community comm = (Community) dso;
defaultTitle = comm.getName();
@@ -217,8 +219,9 @@ public class SyndicationFeed {
if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) {
podcastFeed = true;
}
objectURL = resolveURL(request, comm);
}
objectURL = resolveURL(request, dso);
if (logo != null) {
logoURL = urlOfBitstream(request, logo);
}
@@ -247,11 +250,11 @@ public class SyndicationFeed {
// add entries for items
if (items != null) {
List<SyndEntry> entries = new ArrayList<SyndEntry>();
for (DSpaceObject itemDSO : items) {
if (itemDSO.getType() != Constants.ITEM) {
for (IndexableObject idxObj : items) {
if (idxObj.getType() != Constants.ITEM) {
continue;
}
Item item = (Item) itemDSO;
Item item = (Item) idxObj;
boolean hasDate = false;
SyndEntry entry = new SyndEntryImpl();
entries.add(entry);
@@ -366,7 +369,7 @@ public class SyndicationFeed {
if (ArrayUtils.contains(podcastableMIMETypes, mime)) {
SyndEnclosure enc = new SyndEnclosureImpl();
enc.setType(bit.getFormat(context).getMIMEType());
enc.setLength(bit.getSize());
enc.setLength(bit.getSizeBytes());
enc.setUrl(urlOfBitstream(request, bit));
enclosures.add(enc);
} else {

View File

@@ -22,9 +22,9 @@ import java.util.Set;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
@@ -44,7 +44,7 @@ public class Util {
// cache for source version result
private static String sourceVersion = null;
private static Logger log = Logger.getLogger(Util.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Util.class);
/**
* Default constructor. Must be protected as org.dspace.xmlworkflow.WorkflowUtils extends it
@@ -418,31 +418,33 @@ public class Util {
List<DCInputSet> inputSets = inputsReader.getInputsByCollectionHandle(col_handle);
for (DCInputSet inputSet : inputSets) {
// Replace the values of Metadatum[] with the correct ones in case
// of
// controlled vocabularies
String currentField = Utils.standardize(schema, element, qualifier, ".");
for (DCInputSet inputSet : inputSets) {
if (inputSet != null) {
int fieldsNums = inputSet.getNumberFields();
for (int p = 0; p < fieldsNums; p++) {
DCInput[] inputs = inputSet.getFields();
DCInput[][] inputs = inputSet.getFields();
if (inputs != null) {
for (int i = 0; i < inputs.length; i++) {
String inputField = Utils.standardize(inputs[i].getSchema(), inputs[i].getElement(),
inputs[i].getQualifier(), ".");
for (int j = 0; j < inputs[i].length; j++) {
String inputField = Utils
.standardize(inputs[i][j].getSchema(), inputs[i][j].getElement(),
inputs[i][j].getQualifier(), ".");
if (currentField.equals(inputField)) {
myInputs = inputs[i];
myInputs = inputs[i][j];
myInputsFound = true;
break;
}
}
}
}
@@ -471,6 +473,26 @@ public class Util {
return toReturn;
}
/**
* Split a list in an array of i sub-lists uniformly sized
*
* @param idsList the list to split
* @param i the number of sublists to return
*
* @return an array of sub-lists of fixed size
*/
public static <T> List<T>[] splitList(List<T> idsList, int i) {
int setmin = idsList.size() / i;
List<T>[] result = new List[i];
int offset = 0;
for (int idx = 0; idx < i - 1; idx++) {
result[idx] = idsList.subList(offset, offset + setmin);
offset += setmin;
}
result[i - 1] = idsList.subList(offset, idsList.size());
return result;
}
public static List<String> differenceInSubmissionFields(Collection fromCollection, Collection toCollection)
throws DCInputsReaderException {
DCInputsReader reader = new DCInputsReader();
@@ -480,15 +502,19 @@ public class Util {
Set<String> fromFieldName = new HashSet<>();
Set<String> toFieldName = new HashSet<>();
for (DCInputSet ff : from) {
for (DCInput fdc : ff.getFields()) {
for (DCInput[] fdcrow : ff.getFields()) {
for (DCInput fdc : fdcrow) {
fromFieldName.add(fdc.getFieldName());
}
}
}
for (DCInputSet tt : to) {
for (DCInput tdc : tt.getFields()) {
for (DCInput[] tdcrow : tt.getFields()) {
for (DCInput tdc : tdcrow) {
toFieldName.add(tdc.getFieldName());
}
}
}
return ListUtils.removeAll(fromFieldName, toFieldName);
}

View File

@@ -18,7 +18,7 @@ import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.dao.WebAppDAO;
import org.dspace.app.util.service.WebAppService;
import org.dspace.core.Context;
@@ -33,7 +33,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class WebAppServiceImpl implements WebAppService {
private final Logger log = Logger.getLogger(WebAppServiceImpl.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(WebAppServiceImpl.class);
@Autowired(required = true)
protected WebAppDAO webAppDAO;

View File

@@ -11,7 +11,7 @@ import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

View File

@@ -14,6 +14,7 @@ import java.util.Map;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.w3c.dom.Document;
/**
@@ -41,6 +42,13 @@ public interface OpenSearchService {
*/
public List<String> getFormats();
/**
* Determine if the module is active
*
* @return boolean indicator if the OpenSearch module is enabled or not
*/
public boolean isEnabled();
/**
* Returns a mime-type associated with passed format
*
@@ -76,7 +84,7 @@ public interface OpenSearchService {
* @param totalResults - the hit count
* @param start - start result index
* @param pageSize - page size
* @param scope - search scope, null or community/collection handle
* @param scope - search scope, null or the community/collection
* @param results the retreived DSpace objects satisfying search
* @param labels labels to apply - format specific
* @return formatted search results
@@ -84,7 +92,7 @@ public interface OpenSearchService {
*/
public String getResultsString(Context context, String format, String query, int totalResults, int start,
int pageSize,
DSpaceObject scope, List<DSpaceObject> results,
IndexableObject scope, List<IndexableObject> results,
Map<String, String> labels) throws IOException;
/**
@@ -96,7 +104,7 @@ public interface OpenSearchService {
* @param totalResults - the hit count
* @param start - start result index
* @param pageSize - page size
* @param scope - search scope, null or community/collection handle
* @param scope - search scope, null or the community/collection
* @param results the retreived DSpace objects satisfying search
* @param labels labels to apply - format specific
* @return formatted search results
@@ -104,7 +112,7 @@ public interface OpenSearchService {
*/
public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
int pageSize,
DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels)
IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
throws IOException;
public DSpaceObject resolveScope(Context context, String scope) throws SQLException;

View File

@@ -210,4 +210,10 @@ public interface AuthenticationMethod {
public String loginPageURL(Context context,
HttpServletRequest request,
HttpServletResponse response);
/**
* Returns a short name that uniquely identifies this authentication method
* @return The authentication method name
*/
public String getName();
}

View File

@@ -9,6 +9,7 @@ package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -16,15 +17,15 @@ import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.ListUtils;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.service.ClientInfoService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
@@ -49,7 +50,7 @@ public class IPAuthentication implements AuthenticationMethod {
/**
* Our logger
*/
private static Logger log = Logger.getLogger(IPAuthentication.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class);
/**
* Whether to look for x-forwarded headers for logging IP addresses
@@ -67,6 +68,7 @@ public class IPAuthentication implements AuthenticationMethod {
protected List<IPMatcher> ipNegativeMatchers;
protected GroupService groupService;
protected ClientInfoService clientInfoService;
/**
@@ -91,6 +93,7 @@ public class IPAuthentication implements AuthenticationMethod {
ipMatcherGroupIDs = new HashMap<>();
ipMatcherGroupNames = new HashMap<>();
groupService = EPersonServiceFactory.getInstance().getGroupService();
clientInfoService = CoreServiceFactory.getInstance().getClientInfoService();
List<String> propNames = DSpaceServicesFactory.getInstance().getConfigurationService()
.getPropertyKeys("authentication-ip");
@@ -164,23 +167,12 @@ public class IPAuthentication implements AuthenticationMethod {
public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
throws SQLException {
if (request == null) {
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
}
List<Group> groups = new ArrayList<Group>();
// Get the user's IP address
String addr = request.getRemoteAddr();
if (useProxies == null) {
useProxies = ConfigurationManager.getBooleanProperty("useProxies", false);
}
if (useProxies && request.getHeader("X-Forwarded-For") != null) {
/* This header is a comma delimited list */
for (String xfip : request.getHeader("X-Forwarded-For").split(",")) {
if (!request.getHeader("X-Forwarded-For").contains(addr)) {
addr = xfip.trim();
}
}
}
String addr = clientInfoService.getClientIp(request);
for (IPMatcher ipm : ipMatchers) {
try {
@@ -276,4 +268,9 @@ public class IPAuthentication implements AuthenticationMethod {
HttpServletResponse response) {
return null;
}
@Override
public String getName() {
return "ip";
}
}

View File

@@ -10,7 +10,7 @@ package org.dspace.authenticate;
import java.net.Inet6Address;
import java.net.UnknownHostException;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
/**
* <p>
@@ -38,7 +38,7 @@ import org.apache.log4j.Logger;
* @version $Revision$
*/
public class IPMatcher {
private static Logger log = Logger.getLogger(IPMatcher.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPMatcher.class);
/**
* Network to match

View File

@@ -10,6 +10,7 @@ package org.dspace.authenticate;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Hashtable;
import java.util.List;
import javax.naming.NamingEnumeration;
@@ -27,9 +28,8 @@ import javax.naming.ldap.StartTlsResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.authenticate.service.AuthenticationService;
import org.dspace.authorize.AuthorizeException;
@@ -61,7 +61,7 @@ public class LDAPAuthentication
/**
* log4j category
*/
private static Logger log = Logger.getLogger(LDAPAuthentication.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(LDAPAuthentication.class);
protected AuthenticationService authenticationService = AuthenticateServiceFactory.getInstance()
.getAuthenticationService();
@@ -136,7 +136,7 @@ public class LDAPAuthentication
log.warn(LogManager.getHeader(context,
"ldap_specialgroup",
"Group defined in login.specialgroup does not exist"));
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
} else {
return Arrays.asList(ldapGroup);
}
@@ -145,7 +145,7 @@ public class LDAPAuthentication
} catch (Exception npe) {
// The user is not an LDAP user, so we don't need to worry about them
}
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
}
/*
@@ -639,6 +639,11 @@ public class LDAPAuthentication
return null;
}
@Override
public String getName() {
return "ldap";
}
/*
* Add authenticated users to the group defined in dspace.cfg by
* the authentication-ldap.login.groupmap.* key.

View File

@@ -9,13 +9,13 @@ package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
@@ -49,7 +49,7 @@ public class PasswordAuthentication
/**
* log4j category
*/
private static Logger log = Logger.getLogger(PasswordAuthentication.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PasswordAuthentication.class);
/**
@@ -151,7 +151,7 @@ public class PasswordAuthentication
"password_specialgroup",
"Group defined in modules/authentication-password.cfg login" +
".specialgroup does not exist"));
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
} else {
return Arrays.asList(specialGroup);
}
@@ -160,7 +160,7 @@ public class PasswordAuthentication
} catch (Exception e) {
log.error(LogManager.getHeader(context, "getSpecialGroups", ""), e);
}
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
}
/**
@@ -242,4 +242,9 @@ public class PasswordAuthentication
HttpServletResponse response) {
return null;
}
@Override
public String getName() {
return "password";
}
}

View File

@@ -12,6 +12,7 @@ import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
@@ -22,9 +23,8 @@ import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
@@ -70,7 +70,7 @@ public class ShibAuthentication implements AuthenticationMethod {
/**
* log4j category
*/
private static Logger log = Logger.getLogger(ShibAuthentication.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ShibAuthentication.class);
/**
* Additional metadata mappings
@@ -288,7 +288,7 @@ public class ShibAuthentication implements AuthenticationMethod {
if (request == null ||
context.getCurrentUser() == null ||
request.getSession().getAttribute("shib.authenticated") == null) {
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
}
// If we have already calculated the special groups then return them.
@@ -404,7 +404,7 @@ public class ShibAuthentication implements AuthenticationMethod {
return new ArrayList<>(groups);
} catch (Throwable t) {
log.error("Unable to validate any sepcial groups this user may belong too because of an exception.", t);
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
}
}
@@ -538,6 +538,11 @@ public class ShibAuthentication implements AuthenticationMethod {
}
}
@Override
public String getName() {
return "shibboleth";
}
/**
* Identify an existing EPerson based upon the shibboleth attributes provided on
* the request object. There are three cases where this can occurr, each as

View File

@@ -21,6 +21,7 @@ import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.StringTokenizer;
@@ -28,9 +29,8 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.authenticate.service.AuthenticationService;
import org.dspace.authorize.AuthorizeException;
@@ -105,7 +105,7 @@ public class X509Authentication implements AuthenticationMethod {
/**
* log4j category
*/
private static Logger log = Logger.getLogger(X509Authentication.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(X509Authentication.class);
/**
* public key of CA to check client certs against.
@@ -442,7 +442,7 @@ public class X509Authentication implements AuthenticationMethod {
public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
throws SQLException {
if (request == null) {
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
}
Boolean authenticated = false;
@@ -472,7 +472,7 @@ public class X509Authentication implements AuthenticationMethod {
return groups;
}
return ListUtils.EMPTY_LIST;
return Collections.EMPTY_LIST;
}
/**
@@ -589,4 +589,9 @@ public class X509Authentication implements AuthenticationMethod {
HttpServletResponse response) {
return loginPageURL;
}
@Override
public String getName() {
return "x509";
}
}

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.authority;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.List;
@@ -22,7 +23,8 @@ import org.apache.solr.client.solrj.response.QueryResponse;
*/
public interface AuthoritySearchService {
public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException;
public QueryResponse search(SolrQuery query)
throws SolrServerException, MalformedURLException, IOException;
public List<String> getAllIndexedMetadataFields() throws Exception;

View File

@@ -46,17 +46,11 @@ public class AuthorityServiceImpl implements AuthorityService {
}
for (AuthorityIndexerInterface indexerInterface : indexers) {
indexerInterface.init(context, item);
while (indexerInterface.hasMore()) {
AuthorityValue authorityValue = indexerInterface.nextValue();
if (authorityValue != null) {
indexingService.indexContent(authorityValue, true);
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(context , item);
for (AuthorityValue authorityValue : authorityValues) {
indexingService.indexContent(authorityValue);
}
}
//Close up
indexerInterface.close();
}
//Commit to our server
indexingService.commit();
}

View File

@@ -12,10 +12,10 @@ import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrInputDocument;
@@ -30,7 +30,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class AuthoritySolrServiceImpl implements AuthorityIndexingService, AuthoritySearchService {
private static final Logger log = Logger.getLogger(AuthoritySolrServiceImpl.class);
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthoritySolrServiceImpl.class);
protected AuthoritySolrServiceImpl() {
@@ -39,16 +39,17 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
/**
* Non-Static CommonsHttpSolrServer for processing indexing events.
*/
protected HttpSolrServer solr = null;
protected HttpSolrClient solr = null;
protected HttpSolrServer getSolr() throws MalformedURLException, SolrServerException {
protected HttpSolrClient getSolr()
throws MalformedURLException, SolrServerException, IOException {
if (solr == null) {
String solrService = ConfigurationManager.getProperty("solr.authority.server");
log.debug("Solr authority URL: " + solrService);
solr = new HttpSolrServer(solrService);
solr = new HttpSolrClient.Builder(solrService).build();
solr.setBaseURL(solrService);
SolrQuery solrQuery = new SolrQuery().setQuery("*:*");
@@ -60,7 +61,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
}
@Override
public void indexContent(AuthorityValue value, boolean force) {
public void indexContent(AuthorityValue value) {
SolrInputDocument doc = value.getSolrInputDocument();
try {
@@ -129,7 +130,8 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
}
@Override
public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException {
public QueryResponse search(SolrQuery query)
throws SolrServerException, MalformedURLException, IOException {
return getSolr().query(query);
}

View File

@@ -12,7 +12,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
/**
* This class contains a list of active authority types.
@@ -32,7 +32,7 @@ public class AuthorityTypes {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(AuthorityTypes.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityTypes.class);
protected List<AuthorityValue> types = new ArrayList<AuthorityValue>();

View File

@@ -14,8 +14,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authorize.AuthorizeException;
@@ -234,7 +234,7 @@ public class AuthorityValue {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(AuthorityValue.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValue.class);
@Override
public String toString() {

View File

@@ -13,8 +13,8 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
@@ -34,7 +34,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class AuthorityValueServiceImpl implements AuthorityValueService {
private final Logger log = Logger.getLogger(AuthorityValueServiceImpl.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValueServiceImpl.class);
@Autowired(required = true)
protected AuthorityTypes authorityTypes;

View File

@@ -12,8 +12,8 @@ import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority;
import java.util.List;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public interface SolrAuthorityInterface {

    /**
     * Query the external authority source for values matching the given text.
     *
     * @param text free-text query to match against the source
     * @param max  maximum number of authority values to return
     * @return matching authority values (implementation-defined order)
     */
    List<AuthorityValue> queryAuthorities(String text, int max);

    /**
     * Look up a single authority value by its identifier in the external source.
     *
     * @param id identifier of the record in the external authority source
     * @return the corresponding authority value
     */
    AuthorityValue queryAuthorityID(String id);
}

View File

@@ -19,7 +19,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.Item;
@@ -40,7 +40,7 @@ public class UpdateAuthorities {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(UpdateAuthorities.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(UpdateAuthorities.class);
protected PrintWriter print = null;

View File

@@ -12,7 +12,7 @@ import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityService;
import org.dspace.content.DSpaceObject;
@@ -33,7 +33,7 @@ import org.dspace.event.Event;
*/
public class AuthorityConsumer implements Consumer {
private final Logger log = Logger.getLogger(AuthorityConsumer.class);
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityConsumer.class);
/**
* A set of all item IDs installed which need their authority updated

View File

@@ -8,13 +8,17 @@
package org.dspace.authority.indexer;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityService;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
/**
@@ -25,7 +29,7 @@ import org.dspace.core.Context;
*/
public class AuthorityIndexClient {
private static Logger log = Logger.getLogger(AuthorityIndexClient.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityIndexClient.class);
protected static final AuthorityService authorityService =
AuthorityServiceFactory.getInstance().getAuthorityService();
@@ -33,6 +37,8 @@ public class AuthorityIndexClient {
AuthorityServiceFactory.getInstance().getAuthorityIndexingService();
protected static final List<AuthorityIndexerInterface> indexers =
AuthorityServiceFactory.getInstance().getAuthorityIndexers();
protected static final ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
/**
* Default constructor
@@ -64,15 +70,17 @@ public class AuthorityIndexClient {
for (AuthorityIndexerInterface indexerInterface : indexers) {
log.info("Initialize " + indexerInterface.getClass().getName());
System.out.println("Initialize " + indexerInterface.getClass().getName());
indexerInterface.init(context, true);
while (indexerInterface.hasMore()) {
AuthorityValue authorityValue = indexerInterface.nextValue();
if (authorityValue != null) {
Iterator<Item> allItems = itemService.findAll(context);
Map<String, AuthorityValue> authorityCache = new HashMap<>();
while (allItems.hasNext()) {
Item item = allItems.next();
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(
context, item, authorityCache);
for (AuthorityValue authorityValue : authorityValues) {
toIndexValues.put(authorityValue.getId(), authorityValue);
}
context.uncacheEntity(item);
}
//Close up
indexerInterface.close();
}
@@ -82,7 +90,7 @@ public class AuthorityIndexClient {
log.info("Writing new data");
System.out.println("Writing new data");
for (String id : toIndexValues.keySet()) {
indexingService.indexContent(toIndexValues.get(id), true);
indexingService.indexContent(toIndexValues.get(id));
indexingService.commit();
}

View File

@@ -9,6 +9,8 @@
package org.dspace.authority.indexer;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import org.dspace.authority.AuthorityValue;
import org.dspace.authorize.AuthorizeException;
@@ -23,17 +25,10 @@ import org.dspace.core.Context;
*/
public interface AuthorityIndexerInterface {
public void init(Context context, Item item);
public void init(Context context, boolean useCache);
public void init(Context context);
public AuthorityValue nextValue();
public boolean hasMore() throws SQLException, AuthorizeException;
public void close();
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
throws SQLException, AuthorizeException;
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
throws SQLException, AuthorizeException;
public boolean isConfiguredProperly();
}

View File

@@ -19,7 +19,7 @@ import org.dspace.authority.AuthorityValue;
public interface AuthorityIndexingService {
public void indexContent(AuthorityValue value, boolean force);
public void indexContent(AuthorityValue value);
public void cleanIndex() throws Exception;

View File

@@ -9,14 +9,12 @@ package org.dspace.authority.indexer;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.authorize.AuthorizeException;
@@ -31,12 +29,13 @@ import org.springframework.beans.factory.annotation.Autowired;
/**
* DSpaceAuthorityIndexer is used in IndexClient, which is called by the AuthorityConsumer and the indexing-script.
* <p>
* An instance of DSpaceAuthorityIndexer is bound to a list of items.
* This can be one item or all items too depending on the init() method.
* The DSpaceAuthorityIndexer will return a list of all authority values for a
* given item. It will return an authority value for all metadata fields defined
* in dspace.conf with 'authority.author.indexer.field'.
* <p>
* DSpaceAuthorityIndexer lets you iterate over each metadata value
* for each metadata field defined in dspace.cfg with 'authority.author.indexer.field'
* for each item in the list.
* You have to call getAuthorityValues for every Item you want to index. But you
* can supply an optional cache, to save the mapping from the metadata value to
* the new authority values for metadata fields without an authority key.
* <p>
*
* @author Antoine Snyers (antoine at atmire.com)
@@ -46,25 +45,16 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, InitializingBean {
private static final Logger log = Logger.getLogger(DSpaceAuthorityIndexer.class);
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceAuthorityIndexer.class);
protected Iterator<Item> itemIterator;
protected Item currentItem;
/**
* The list of metadata fields which are to be indexed *
*/
protected List<String> metadataFields;
protected int currentFieldIndex;
protected int currentMetadataIndex;
protected AuthorityValue nextValue;
protected Context context;
@Autowired(required = true)
protected AuthorityValueService authorityValueService;
@Autowired(required = true)
protected ItemService itemService;
protected boolean useCache;
protected Map<String, AuthorityValue> cache;
@Autowired(required = true)
protected ConfigurationService configurationService;
@@ -80,146 +70,83 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
}
}
@Override
public void init(Context context, Item item) {
ArrayList<Item> itemList = new ArrayList<>();
itemList.add(item);
this.itemIterator = itemList.iterator();
currentItem = this.itemIterator.next();
initialize(context);
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
throws SQLException, AuthorizeException {
return getAuthorityValues(context, item, null);
}
@Override
public void init(Context context) {
init(context, false);
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
throws SQLException, AuthorizeException {
List<AuthorityValue> values = new ArrayList<>();
for (String metadataField : metadataFields) {
List<MetadataValue> metadataValues = itemService.getMetadataByMetadataString(item, metadataField);
for (MetadataValue metadataValue : metadataValues) {
String content = metadataValue.getValue();
String authorityKey = metadataValue.getAuthority();
// We only want to update our item IF our UUID is not present
// or if we need to generate one.
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) ||
StringUtils.startsWith(authorityKey, AuthorityValueService.GENERATE);
AuthorityValue value = null;
if (StringUtils.isBlank(authorityKey) && cache != null) {
// This is a value currently without an authority. So query
// the cache, if an authority is found for the exact value.
value = cache.get(content);
}
@Override
public void init(Context context, boolean useCache) {
if (value == null) {
value = getAuthorityValue(context, metadataField, content,authorityKey);
}
if (value != null) {
if (requiresItemUpdate) {
value.updateItem(context, item, metadataValue);
try {
this.itemIterator = itemService.findAll(context);
currentItem = this.itemIterator.next();
} catch (SQLException e) {
log.error("Error while retrieving all items in the metadata indexer");
}
initialize(context);
this.useCache = useCache;
}
protected void initialize(Context context) {
this.context = context;
currentFieldIndex = 0;
currentMetadataIndex = 0;
useCache = false;
cache = new HashMap<>();
}
@Override
public AuthorityValue nextValue() {
return nextValue;
}
@Override
public boolean hasMore() throws SQLException, AuthorizeException {
if (currentItem == null) {
return false;
}
// 1. iterate over the metadata values
String metadataField = metadataFields.get(currentFieldIndex);
List<MetadataValue> values = itemService.getMetadataByMetadataString(currentItem, metadataField);
if (currentMetadataIndex < values.size()) {
prepareNextValue(metadataField, values.get(currentMetadataIndex));
currentMetadataIndex++;
return true;
} else {
// 2. iterate over the metadata fields
if ((currentFieldIndex + 1) < metadataFields.size()) {
currentFieldIndex++;
//Reset our current metadata index since we are moving to another field
currentMetadataIndex = 0;
return hasMore();
} else {
// 3. iterate over the items
if (itemIterator.hasNext()) {
currentItem = itemIterator.next();
//Reset our current field index
currentFieldIndex = 0;
//Reset our current metadata index
currentMetadataIndex = 0;
} else {
currentItem = null;
}
return hasMore();
}
}
}
/**
* This method looks at the authority of a metadata.
* If the authority can be found in solr, that value is reused.
* Otherwise a new authority value will be generated that will be indexed in solr.
* If the authority starts with AuthorityValueGenerator.GENERATE, a specific type of AuthorityValue will be
* generated.
* Depending on the type this may involve querying an external REST service
*
* @param metadataField Is one of the fields defined in dspace.cfg to be indexed.
* @param value Is one of the values of the given metadataField in one of the items being indexed.
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
protected void prepareNextValue(String metadataField, MetadataValue value) throws SQLException, AuthorizeException {
nextValue = null;
String content = value.getValue();
String authorityKey = value.getAuthority();
//We only want to update our item IF our UUID is not present or if we need to generate one.
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) || StringUtils
.startsWith(authorityKey, AuthorityValueService.GENERATE);
if (StringUtils.isNotBlank(authorityKey) && !authorityKey.startsWith(AuthorityValueService.GENERATE)) {
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly necessary here but it prevents
// exceptions in solr
nextValue = authorityValueService.findByUID(context, authorityKey);
}
if (nextValue == null && StringUtils.isBlank(authorityKey) && useCache) {
// A metadata without authority is being indexed
// If there is an exact match in the cache, reuse it rather than adding a new one.
AuthorityValue cachedAuthorityValue = cache.get(content);
if (cachedAuthorityValue != null) {
nextValue = cachedAuthorityValue;
}
}
if (nextValue == null) {
nextValue = authorityValueService
.generate(context, authorityKey, content, metadataField.replaceAll("\\.", "_"));
}
if (nextValue != null && requiresItemUpdate) {
nextValue.updateItem(context, currentItem, value);
try {
itemService.update(context, currentItem);
itemService.update(context, item);
} catch (Exception e) {
log.error("Error creating a metadatavalue's authority", e);
}
}
if (useCache) {
cache.put(content, nextValue);
if (cache != null) {
cache.put(content, value);
}
values.add(value);
} else {
log.error("Error getting an authority value for " +
"the metadata value \"" + content + "\" " +
"in the field \"" + metadataField + "\" " +
"of the item " + item.getHandle());
}
}
@Override
public void close() {
itemIterator = null;
cache.clear();
}
return values;
}
/**
* This method looks at the authority of a metadata value.
* If the authority can be found in solr, that value is reused.
* Otherwise a new authority value will be generated that will be indexed in solr.
*
* If the authority starts with AuthorityValueGenerator.GENERATE, a specific type of AuthorityValue will be
* generated.
* Depending on the type this may involve querying an external REST service
*
* @param context Current DSpace context
* @param metadataField Is one of the fields defined in dspace.cfg to be indexed.
* @param metadataContent Content of the current metadata value.
* @param metadataAuthorityKey Existing authority of the metadata value.
*/
private AuthorityValue getAuthorityValue(Context context, String metadataField,
String metadataContent, String metadataAuthorityKey) {
if (StringUtils.isNotBlank(metadataAuthorityKey) &&
!metadataAuthorityKey.startsWith(AuthorityValueService.GENERATE)) {
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly
// necessary here but it prevents exceptions in solr
AuthorityValue value = authorityValueService.findByUID(context, metadataAuthorityKey);
if (value != null) {
return value;
}
}
return authorityValueService.generate(context, metadataAuthorityKey,
metadataContent, metadataField.replaceAll("\\.", "_"));
}
@Override

View File

@@ -1,87 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.Work;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.orcid.xml.XMLtoWork;
import org.dspace.authority.rest.RestSource;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Orcid extends RestSource {

    /**
     * log4j logger
     */
    private static Logger log = Logger.getLogger(Orcid.class);

    // Lazily-initialized singleton, looked up from the DSpace service manager.
    private static Orcid orcid;

    /**
     * Returns the shared Orcid instance registered under the service name
     * "OrcidSource", creating the reference on first use.
     * Synchronized so concurrent first calls cannot race the lazy lookup
     * and each observe a different (or half-published) instance.
     *
     * @return the singleton Orcid REST source
     */
    public static synchronized Orcid getOrcid() {
        if (orcid == null) {
            orcid = DSpaceServicesFactory.getInstance().getServiceManager()
                .getServiceByName("OrcidSource", Orcid.class);
        }
        return orcid;
    }

    private Orcid(String url) {
        super(url);
    }

    /**
     * Fetch the bio of a single ORCID profile.
     *
     * @param id ORCID identifier
     * @return the first Bio parsed from the orcid-bio response, with its orcid set to {@code id}
     */
    public Bio getBio(String id) {
        Document bioDocument = restConnector.get(id + "/orcid-bio");
        XMLtoBio converter = new XMLtoBio();
        Bio bio = converter.convert(bioDocument).get(0);
        bio.setOrcid(id);
        return bio;
    }

    /**
     * Fetch all works attached to an ORCID profile.
     *
     * @param id ORCID identifier
     * @return works parsed from the orcid-works response
     */
    public List<Work> getWorks(String id) {
        Document document = restConnector.get(id + "/orcid-works");
        XMLtoWork converter = new XMLtoWork();
        return converter.convert(document);
    }

    /**
     * Search ORCID bios by name (sent as a quoted exact phrase).
     *
     * @param name  name to search for
     * @param start offset of the first result
     * @param rows  maximum number of results
     * @return matching bios
     */
    public List<Bio> queryBio(String name, int start, int rows) {
        // Encode the query explicitly as UTF-8; the single-argument
        // URLEncoder.encode(String) is deprecated and uses the platform
        // default charset, which would make results machine-dependent.
        String query = urlEncode("\"" + name + "\"");
        Document bioDocument = restConnector
            .get("search/orcid-bio?q=" + query + "&start=" + start + "&rows=" + rows);
        XMLtoBio converter = new XMLtoBio();
        return converter.convert(bioDocument);
    }

    /**
     * URL-encode a query component using UTF-8.
     */
    private static String urlEncode(String value) {
        try {
            return URLEncoder.encode(value, StandardCharsets.UTF_8.name());
        } catch (UnsupportedEncodingException e) {
            // Cannot happen: UTF-8 support is mandated by the Java platform spec.
            throw new IllegalStateException("UTF-8 encoding not supported", e);
        }
    }

    @Override
    public List<AuthorityValue> queryAuthorities(String text, int max) {
        List<Bio> bios = queryBio(text, 0, max);
        List<AuthorityValue> authorities = new ArrayList<AuthorityValue>();
        for (Bio bio : bios) {
            authorities.add(OrcidAuthorityValue.create(bio));
        }
        return authorities;
    }

    @Override
    public AuthorityValue queryAuthorityID(String id) {
        Bio bio = getBio(id);
        return OrcidAuthorityValue.create(bio);
    }
}

View File

@@ -1,328 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueServiceImpl;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.BioExternalIdentifier;
import org.dspace.authority.orcid.model.BioName;
import org.dspace.authority.orcid.model.BioResearcherUrl;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class OrcidAuthorityValue extends PersonAuthorityValue {

    /**
     * log4j logger
     */
    private static Logger log = Logger.getLogger(OrcidAuthorityValue.class);

    // The ORCID identifier backing this authority record.
    private String orcid_id;

    // Extra bio metadata keyed by label (e.g. country, keyword,
    // external_identifier, researcher_url, biography); each label maps to
    // every value seen for it.
    private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();

    // Dirty flag raised by updateValue()/updateOtherMetadata() while merging a
    // Bio; read and reset at the end of setValues(Bio bio).
    private boolean update; // used in setValues(Bio bio)

    /**
     * Creates an instance of OrcidAuthorityValue with only uninitialized fields.
     * This is meant to be filled in with values from an existing record.
     * To create a brand new OrcidAuthorityValue, use create()
     */
    public OrcidAuthorityValue() {
    }

    /**
     * Restores an authority value from an existing Solr document.
     *
     * @param document Solr document to read field values from
     */
    public OrcidAuthorityValue(SolrDocument document) {
        super(document);
    }

    public String getOrcid_id() {
        return orcid_id;
    }

    public void setOrcid_id(String orcid_id) {
        this.orcid_id = orcid_id;
    }

    public Map<String, List<String>> getOtherMetadata() {
        return otherMetadata;
    }

    /**
     * Appends a value under the given label, creating the list on first use.
     *
     * @param label metadata label
     * @param data  value to record under that label
     */
    public void addOtherMetadata(String label, String data) {
        List<String> strings = otherMetadata.get(label);
        if (strings == null) {
            strings = new ArrayList<String>();
        }
        strings.add(data);
        otherMetadata.put(label, strings);
    }

    /**
     * Builds the Solr document for indexing: the superclass fields plus an
     * "orcid_id" field (when non-blank) and one "label_&lt;label&gt;" field per
     * other-metadata entry.
     */
    @Override
    public SolrInputDocument getSolrInputDocument() {
        SolrInputDocument doc = super.getSolrInputDocument();
        if (StringUtils.isNotBlank(getOrcid_id())) {
            doc.addField("orcid_id", getOrcid_id());
        }
        for (String t : otherMetadata.keySet()) {
            List<String> data = otherMetadata.get(t);
            for (String data_entry : data) {
                doc.addField("label_" + t, data_entry);
            }
        }
        return doc;
    }

    /**
     * Populates this value from a Solr document: superclass fields, the
     * "orcid_id" field, and every "label_"-prefixed field back into
     * otherMetadata (the reverse of getSolrInputDocument()).
     */
    @Override
    public void setValues(SolrDocument document) {
        super.setValues(document);
        // NOTE(review): String.valueOf turns a missing field into the literal
        // string "null" rather than a null reference — confirm this is intended.
        this.orcid_id = String.valueOf(document.getFieldValue("orcid_id"));
        otherMetadata = new HashMap<String, List<String>>();
        for (String fieldName : document.getFieldNames()) {
            String labelPrefix = "label_";
            if (fieldName.startsWith(labelPrefix)) {
                String label = fieldName.substring(labelPrefix.length());
                List<String> list = new ArrayList<String>();
                Collection<Object> fieldValues = document.getFieldValues(fieldName);
                for (Object o : fieldValues) {
                    list.add(String.valueOf(o));
                }
                otherMetadata.put(label, list);
            }
        }
    }

    /**
     * Creates a brand new, empty authority value with a random UUID id and a
     * fresh creation / last-modified date.
     *
     * @return a new OrcidAuthorityValue
     */
    public static OrcidAuthorityValue create() {
        OrcidAuthorityValue orcidAuthorityValue = new OrcidAuthorityValue();
        orcidAuthorityValue.setId(UUID.randomUUID().toString());
        orcidAuthorityValue.updateLastModifiedDate();
        orcidAuthorityValue.setCreationDate(new Date());
        return orcidAuthorityValue;
    }

    /**
     * Create an authority based on a given orcid bio
     *
     * @param bio Bio
     * @return OrcidAuthorityValue
     */
    public static OrcidAuthorityValue create(Bio bio) {
        OrcidAuthorityValue authority = OrcidAuthorityValue.create();
        authority.setValues(bio);
        return authority;
    }

    /**
     * Merges the given ORCID bio into this value, only overwriting fields whose
     * incoming data is non-blank and different from the current value. The
     * helpers updateValue()/updateOtherMetadata() raise the 'update' dirty
     * flag as a side effect; when set, update() is called (inherited —
     * presumably refreshes the last-modified date, confirm in AuthorityValue)
     * and the flag is reset before returning.
     *
     * @param bio bio to merge in
     * @return true if any field of this value changed
     */
    public boolean setValues(Bio bio) {
        BioName name = bio.getName();
        if (updateValue(bio.getOrcid(), getOrcid_id())) {
            setOrcid_id(bio.getOrcid());
        }
        if (updateValue(name.getFamilyName(), getLastName())) {
            setLastName(name.getFamilyName());
        }
        if (updateValue(name.getGivenNames(), getFirstName())) {
            setFirstName(name.getGivenNames());
        }
        // Credit name and other names become name variants (no removal, add-only).
        if (StringUtils.isNotBlank(name.getCreditName())) {
            if (!getNameVariants().contains(name.getCreditName())) {
                addNameVariant(name.getCreditName());
                update = true;
            }
        }
        for (String otherName : name.getOtherNames()) {
            if (!getNameVariants().contains(otherName)) {
                addNameVariant(otherName);
                update = true;
            }
        }
        if (updateOtherMetadata("country", bio.getCountry())) {
            addOtherMetadata("country", bio.getCountry());
        }
        for (String keyword : bio.getKeywords()) {
            if (updateOtherMetadata("keyword", keyword)) {
                addOtherMetadata("keyword", keyword);
            }
        }
        for (BioExternalIdentifier externalIdentifier : bio.getBioExternalIdentifiers()) {
            if (updateOtherMetadata("external_identifier", externalIdentifier.toString())) {
                addOtherMetadata("external_identifier", externalIdentifier.toString());
            }
        }
        for (BioResearcherUrl researcherUrl : bio.getResearcherUrls()) {
            if (updateOtherMetadata("researcher_url", researcherUrl.toString())) {
                addOtherMetadata("researcher_url", researcherUrl.toString());
            }
        }
        if (updateOtherMetadata("biography", bio.getBiography())) {
            addOtherMetadata("biography", bio.getBiography());
        }
        setValue(getName());
        if (update) {
            update();
        }
        // Capture the flag before clearing so repeated calls start clean.
        boolean result = update;
        update = false;
        return result;
    }

    /**
     * Decides whether a value should be recorded under the given label:
     * true when the label is unknown (and data is non-blank) or when the data
     * is not yet among the stored values. Raises the 'update' dirty flag as a
     * side effect when it returns true.
     */
    private boolean updateOtherMetadata(String label, String data) {
        List<String> strings = getOtherMetadata().get(label);
        boolean update;
        if (strings == null) {
            update = StringUtils.isNotBlank(data);
        } else {
            update = !strings.contains(data);
        }
        if (update) {
            // Local 'update' shadows the field; this assignment targets the field.
            this.update = true;
        }
        return update;
    }

    /**
     * True when the incoming value is non-blank and differs from the resident
     * one. Raises the 'update' dirty flag as a side effect when true.
     */
    private boolean updateValue(String incoming, String resident) {
        boolean update = StringUtils.isNotBlank(incoming) && !incoming.equals(resident);
        if (update) {
            this.update = true;
        }
        return update;
    }

    /**
     * Adds the ORCID id to the superclass's choice-select map under "orcid".
     */
    @Override
    public Map<String, String> choiceSelectMap() {
        Map<String, String> map = super.choiceSelectMap();
        map.put("orcid", getOrcid_id());
        return map;
    }

    @Override
    public String getAuthorityType() {
        return "orcid";
    }

    /**
     * Builds the "generate" marker string (GENERATE + type + SPLIT [+ orcid id])
     * used to request creation of this kind of authority value.
     */
    @Override
    public String generateString() {
        String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() + AuthorityValueServiceImpl
            .SPLIT;
        if (StringUtils.isNotBlank(getOrcid_id())) {
            generateString += getOrcid_id();
        }
        return generateString;
    }

    /**
     * Creates a new instance: when info is non-blank it is treated as an ORCID
     * id and resolved through the Orcid source; otherwise an empty value is
     * created.
     */
    @Override
    public AuthorityValue newInstance(String info) {
        AuthorityValue authorityValue = null;
        if (StringUtils.isNotBlank(info)) {
            Orcid orcid = Orcid.getOrcid();
            authorityValue = orcid.queryAuthorityID(info);
        } else {
            authorityValue = OrcidAuthorityValue.create();
        }
        return authorityValue;
    }

    /**
     * Equality is based solely on orcid_id (superclass fields are not compared).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        OrcidAuthorityValue that = (OrcidAuthorityValue) o;

        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }

        return true;
    }

    // Kept consistent with equals(): hash on orcid_id only.
    @Override
    public int hashCode() {
        return orcid_id != null ? orcid_id.hashCode() : 0;
    }

    /**
     * Deep comparison for change detection: superclass information, orcid_id
     * (via equals through super? — no: equals here only checks orcid_id, and
     * super.hasTheSameInformationAs covers the inherited fields), plus a
     * set-wise comparison of every other-metadata label present on this value.
     * Note: labels present only on 'that' are not checked, so the relation is
     * not symmetric for extra labels on the argument.
     */
    @Override
    public boolean hasTheSameInformationAs(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.hasTheSameInformationAs(o)) {
            return false;
        }

        OrcidAuthorityValue that = (OrcidAuthorityValue) o;

        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }

        for (String key : otherMetadata.keySet()) {
            if (otherMetadata.get(key) != null) {
                List<String> metadata = otherMetadata.get(key);
                // Local 'otherMetadata' deliberately shadows the field here:
                // it holds the other object's values for this label.
                List<String> otherMetadata = that.otherMetadata.get(key);
                if (otherMetadata == null) {
                    return false;
                } else {
                    // Order-insensitive comparison of the value lists.
                    HashSet<String> metadataSet = new HashSet<String>(metadata);
                    HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
                    if (!metadataSet.equals(otherMetadataSet)) {
                        return false;
                    }
                }
            } else {
                if (that.otherMetadata.get(key) != null) {
                    return false;
                }
            }
        }

        return true;
    }
}

View File

@@ -0,0 +1,191 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.rest.RESTConnector;
import org.json.JSONObject;
import org.orcid.jaxb.model.record_v2.Person;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
* This class contains all methods for retrieving "Person" objects calling the ORCID (version 2) endpoints.
* Additionally, this can also create AuthorityValues based on these returned Person objects
*/
public class Orcidv2 implements SolrAuthorityInterface {

    private static Logger log = LogManager.getLogger(Orcidv2.class);

    public RESTConnector restConnector;
    private String OAUTHUrl;
    private String clientId;
    private String clientSecret;

    // Bearer token obtained from the ORCID OAuth endpoint; null until init() succeeds.
    private String accessToken;

    /**
     * Initialize the accessToken that is required for all subsequent calls to ORCID.
     * <p>
     * BUG FIX: the original guard tested {@code accessToken}, which is never assigned
     * before this method runs (no setter, constructors do not set it), so the token
     * request could never be sent. The correct precondition is that the OAuth client
     * credentials are configured.
     *
     * @throws java.io.IOException passed through from HTTPclient.
     */
    public void init() throws IOException {
        if (StringUtils.isNotBlank(clientId) && StringUtils.isNotBlank(clientSecret)) {
            // NOTE(review): credentials are sent as query parameters; the ORCID docs
            // describe a form-encoded POST body — confirm the endpoint accepts this.
            String authenticationParameters = "?client_id=" + clientId +
                "&client_secret=" + clientSecret +
                "&scope=/read-public&grant_type=client_credentials";
            HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
            httpPost.addHeader("Accept", "application/json");
            httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");

            HttpClient httpClient = HttpClientBuilder.create().build();
            HttpResponse getResponse = httpClient.execute(httpPost);

            JSONObject responseObject = null;
            // try-with-resources: the original leaked the response stream/reader.
            try (BufferedReader streamReader = new BufferedReader(
                    new InputStreamReader(getResponse.getEntity().getContent(), "UTF-8"))) {
                String inputStr;
                // Scan line by line for a single-line JSON object carrying the token.
                while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
                    if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
                        try {
                            responseObject = new JSONObject(inputStr);
                        } catch (Exception e) {
                            // Not valid JSON after all; keep scanning subsequent lines.
                            responseObject = null;
                        }
                    }
                }
            }
            if (responseObject != null && responseObject.has("access_token")) {
                accessToken = (String) responseObject.get("access_token");
            }
        }
    }

    /**
     * Makes an instance of the Orcidv2 class based on the provided parameters.
     * This constructor is called through the spring bean initialization
     */
    private Orcidv2(String url, String OAUTHUrl, String clientId, String clientSecret) {
        this.restConnector = new RESTConnector(url);
        this.OAUTHUrl = OAUTHUrl;
        this.clientId = clientId;
        this.clientSecret = clientSecret;
    }

    /**
     * Makes an instance of the Orcidv2 class based on the provided parameters.
     * This constructor is called through the spring bean initialization
     */
    private Orcidv2(String url) {
        this.restConnector = new RESTConnector(url);
    }

    /**
     * Query ORCID and convert each returned Person into an AuthorityValue.
     *
     * @param text search string
     * @param max  maximum number of results to return
     * @return List&lt;AuthorityValue&gt; (never null; empty when nothing matched)
     */
    @Override
    public List<AuthorityValue> queryAuthorities(String text, int max) {
        List<Person> bios = queryBio(text, max);
        List<AuthorityValue> result = new ArrayList<>();
        for (Person person : bios) {
            AuthorityValue orcidAuthorityValue = Orcidv2AuthorityValue.create(person);
            if (orcidAuthorityValue != null) {
                result.add(orcidAuthorityValue);
            }
        }
        return result;
    }

    /**
     * Create an AuthorityValue from a Person retrieved using the given orcid identifier.
     *
     * @param id orcid identifier
     * @return AuthorityValue, or null when the id is invalid or no Person was found
     */
    public AuthorityValue queryAuthorityID(String id) {
        Person person = getBio(id);
        AuthorityValue valueFromPerson = Orcidv2AuthorityValue.create(person);
        return valueFromPerson;
    }

    /**
     * Retrieve a Person object based on a given orcid identifier.
     *
     * @param id orcid identifier; must match {@link Orcidv2AuthorityValue#ORCID_ID_SYNTAX}
     * @return Person, or null when the id does not look like an ORCID id
     */
    public Person getBio(String id) {
        log.debug("getBio called with ID=" + id);
        if (!isValid(id)) {
            return null;
        }
        // Append "/person" unless the caller already included it.
        InputStream bioDocument = restConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
        XMLtoBio converter = new XMLtoBio();
        Person person = converter.convertSinglePerson(bioDocument);
        return person;
    }

    /**
     * Retrieve a list of Person objects.
     *
     * @param text  search string
     * @param start offset to use
     * @param rows  how many rows to return (ORCID caps this at 100)
     * @return List&lt;Person&gt;
     * @throws IllegalArgumentException when more than 100 rows are requested
     */
    public List<Person> queryBio(String text, int start, int rows) {
        if (rows > 100) {
            throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
        }
        // NOTE(review): URLEncoder.encode(String) is deprecated and uses the platform
        // default charset; kept for behavioral compatibility — consider encode(text, "UTF-8").
        String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
        // NOTE(review): this logs the bearer token at debug level — consider redacting.
        log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
        InputStream bioDocument = restConnector.get(searchPath, accessToken);
        XMLtoBio converter = new XMLtoBio();
        List<Person> bios = converter.convert(bioDocument);
        return bios;
    }

    /**
     * Retrieve a list of Person objects starting at offset 0.
     *
     * @param text search string
     * @param max  how many rows to return
     * @return List&lt;Person&gt;
     */
    public List<Person> queryBio(String text, int max) {
        return queryBio(text, 0, max);
    }

    /**
     * Check to see if the provided text has the correct ORCID syntax.
     * Since only searching on ORCID id is allowed, this way, we filter out any queries that would return a
     * blank result anyway
     */
    private boolean isValid(String text) {
        return StringUtils.isNotBlank(text) && text.matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX);
    }
}

View File

@@ -0,0 +1,342 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueServiceImpl;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.utils.DSpace;
import org.orcid.jaxb.model.common_v2.ExternalId;
import org.orcid.jaxb.model.record_v2.ExternalIdentifiers;
import org.orcid.jaxb.model.record_v2.KeywordType;
import org.orcid.jaxb.model.record_v2.NameType;
import org.orcid.jaxb.model.record_v2.Person;
import org.orcid.jaxb.model.record_v2.ResearcherUrlType;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public class Orcidv2AuthorityValue extends PersonAuthorityValue {

    /*
     * The ORCID identifier
     */
    private String orcid_id;

    /*
     * Map containing key-value pairs filled in by "setValues(Person person)".
     * This represents all dynamic information of the object.
     */
    private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();

    /**
     * The syntax that the ORCID id needs to conform to
     */
    public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";

    /**
     * Creates an instance of Orcidv2AuthorityValue with only uninitialized fields.
     * This is meant to be filled in with values from an existing record.
     * To create a brand new Orcidv2AuthorityValue, use create()
     */
    public Orcidv2AuthorityValue() {
    }

    public Orcidv2AuthorityValue(SolrDocument document) {
        super(document);
    }

    public String getOrcid_id() {
        return orcid_id;
    }

    public void setOrcid_id(String orcid_id) {
        this.orcid_id = orcid_id;
    }

    /**
     * Create an empty authority with a fresh random id and timestamps.
     *
     * @return Orcidv2AuthorityValue
     */
    public static Orcidv2AuthorityValue create() {
        Orcidv2AuthorityValue orcidAuthorityValue = new Orcidv2AuthorityValue();
        orcidAuthorityValue.setId(UUID.randomUUID().toString());
        orcidAuthorityValue.updateLastModifiedDate();
        orcidAuthorityValue.setCreationDate(new Date());
        return orcidAuthorityValue;
    }

    /**
     * Create an authority based on a given orcid bio.
     *
     * @param person ORCID Person record; may be null
     * @return Orcidv2AuthorityValue, or null when no Person was supplied
     */
    public static Orcidv2AuthorityValue create(Person person) {
        if (person == null) {
            return null;
        }
        Orcidv2AuthorityValue authority = create();
        authority.setValues(person);
        return authority;
    }

    /**
     * Initialize this instance based on a Person object.
     * NOTE(review): assumes person.getName(), getFamilyName() and getGivenNames()
     * are non-null — confirm against the ORCID v2 record schema.
     *
     * @param person Person
     */
    protected void setValues(Person person) {
        NameType name = person.getName();

        if (!StringUtils.equals(name.getPath(), this.getOrcid_id())) {
            this.setOrcid_id(name.getPath());
        }

        if (!StringUtils.equals(name.getFamilyName().getValue(), this.getLastName())) {
            this.setLastName(name.getFamilyName().getValue());
        }

        if (!StringUtils.equals(name.getGivenNames().getValue(), this.getFirstName())) {
            this.setFirstName(name.getGivenNames().getValue());
        }

        if (name.getCreditName() != null && StringUtils.isNotBlank(name.getCreditName().getValue())) {
            if (!this.getNameVariants().contains(name.getCreditName().getValue())) {
                this.addNameVariant(name.getCreditName().getValue());
            }
        }

        if (person.getKeywords() != null) {
            for (KeywordType keyword : person.getKeywords().getKeyword()) {
                if (this.isNewMetadata("keyword", keyword.getContent())) {
                    this.addOtherMetadata("keyword", keyword.getContent());
                }
            }
        }

        ExternalIdentifiers externalIdentifiers = person.getExternalIdentifiers();
        if (externalIdentifiers != null) {
            for (ExternalId externalIdentifier : externalIdentifiers.getExternalIdentifier()) {
                if (this.isNewMetadata("external_identifier", externalIdentifier.getExternalIdValue())) {
                    this.addOtherMetadata("external_identifier", externalIdentifier.getExternalIdValue());
                }
            }
        }

        if (person.getResearcherUrls() != null) {
            for (ResearcherUrlType researcherUrl : person.getResearcherUrls().getResearcherUrl()) {
                if (this.isNewMetadata("researcher_url", researcherUrl.getUrl().getValue())) {
                    this.addOtherMetadata("researcher_url", researcherUrl.getUrl().getValue());
                }
            }
        }

        if (person.getBiography() != null) {
            if (this.isNewMetadata("biography", person.getBiography().getContent())) {
                this.addOtherMetadata("biography", person.getBiography().getContent());
            }
        }

        this.setValue(this.getName());
    }

    /**
     * Makes an instance of the AuthorityValue with the given information.
     *
     * @param info string info (an ORCID id, or blank to create an empty authority)
     * @return AuthorityValue
     */
    @Override
    public AuthorityValue newInstance(String info) {
        AuthorityValue authorityValue = null;
        if (StringUtils.isNotBlank(info)) {
            Orcidv2 orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
            authorityValue = orcid.queryAuthorityID(info);
        } else {
            // FIX: create() is static; the original called it through "this",
            // which obscured that no instance state is involved.
            authorityValue = create();
        }
        return authorityValue;
    }

    @Override
    public void setValue(String value) {
        super.setValue(value);
    }

    /**
     * Check to see if the provided label / data pair is already present in the "otherMetadata" or not
     * */
    public boolean isNewMetadata(String label, String data) {
        List<String> strings = getOtherMetadata().get(label);
        boolean update;
        if (strings == null) {
            // No entry for this label yet: only non-blank data counts as new.
            update = StringUtils.isNotBlank(data);
        } else {
            update = !strings.contains(data);
        }
        return update;
    }

    /**
     * Add additional metadata to the otherMetadata map*/
    public void addOtherMetadata(String label, String data) {
        List<String> strings = otherMetadata.get(label);
        if (strings == null) {
            strings = new ArrayList<>();
        }
        strings.add(data);
        otherMetadata.put(label, strings);
    }

    // NOTE(review): exposes the internal mutable map directly; callers can mutate it.
    public Map<String, List<String>> getOtherMetadata() {
        return otherMetadata;
    }

    /**
     * Generate a solr record from this instance
     *
     * @return SolrInputDocument
     */
    @Override
    public SolrInputDocument getSolrInputDocument() {
        SolrInputDocument doc = super.getSolrInputDocument();
        if (StringUtils.isNotBlank(getOrcid_id())) {
            doc.addField("orcid_id", getOrcid_id());
        }
        // entrySet avoids a second lookup per key (original used keySet + get).
        for (Map.Entry<String, List<String>> entry : otherMetadata.entrySet()) {
            for (String data_entry : entry.getValue()) {
                doc.addField("label_" + entry.getKey(), data_entry);
            }
        }
        return doc;
    }

    /**
     * Information that can be used the choice ui
     *
     * @return map
     */
    @Override
    public Map<String, String> choiceSelectMap() {
        Map<String, String> map = super.choiceSelectMap();
        String orcid_id = getOrcid_id();
        if (StringUtils.isNotBlank(orcid_id)) {
            map.put("orcid", orcid_id);
        }
        return map;
    }

    @Override
    public String getAuthorityType() {
        return "orcid";
    }

    /**
     * Provides a string that will allow this AuthorityType to be recognized and provides information to create a new
     * instance to be created using public Orcidv2AuthorityValue newInstance(String info).
     *
     * @return see {@link org.dspace.authority.service.AuthorityValueService#GENERATE AuthorityValueService.GENERATE}
     */
    @Override
    public String generateString() {
        String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() +
            AuthorityValueServiceImpl.SPLIT;
        if (StringUtils.isNotBlank(getOrcid_id())) {
            generateString += getOrcid_id();
        }
        return generateString;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;

        // Identity is defined by the ORCID id alone; hashCode matches.
        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        return orcid_id != null ? orcid_id.hashCode() : 0;
    }

    /**
     * The regular equals() only checks if both AuthorityValues describe the same authority.
     * This method checks if the AuthorityValues have different information
     * E.g. it is used to decide when lastModified should be updated.
     *
     * @param o object
     * @return true or false
     */
    @Override
    public boolean hasTheSameInformationAs(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.hasTheSameInformationAs(o)) {
            return false;
        }

        Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;

        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }

        // FIX: the original declared a local named "otherMetadata" that shadowed the
        // field of the same name inside this loop; renamed locals for clarity.
        for (Map.Entry<String, List<String>> entry : otherMetadata.entrySet()) {
            List<String> ourValues = entry.getValue();
            List<String> theirValues = that.otherMetadata.get(entry.getKey());
            if (ourValues != null) {
                if (theirValues == null) {
                    return false;
                }
                // Compare as sets: only membership matters, not order or duplicates.
                HashSet<String> ourSet = new HashSet<String>(ourValues);
                HashSet<String> theirSet = new HashSet<String>(theirValues);
                if (!ourSet.equals(theirSet)) {
                    return false;
                }
            } else {
                if (theirValues != null) {
                    return false;
                }
            }
        }
        // NOTE(review): keys present only in that.otherMetadata are not compared,
        // matching the original behavior — confirm this asymmetry is intended.
        return true;
    }
}

View File

@@ -1,112 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Bio {

    protected String orcid;
    protected BioName name;
    protected String country;
    protected Set<String> keywords;
    protected Set<BioExternalIdentifier> bioExternalIdentifiers;
    protected Set<BioResearcherUrl> researcherUrls;
    protected String biography;

    /**
     * Builds an empty Bio: an empty name plus empty, insertion-ordered
     * collections for keywords, external identifiers and researcher URLs.
     */
    public Bio() {
        name = new BioName();
        keywords = new LinkedHashSet<>();
        bioExternalIdentifiers = new LinkedHashSet<>();
        researcherUrls = new LinkedHashSet<>();
    }

    /** @return the ORCID identifier of this bio (may be null). */
    public String getOrcid() {
        return orcid;
    }

    public void setOrcid(String orcid) {
        this.orcid = orcid;
    }

    /** @return the structured name; never null after construction. */
    public BioName getName() {
        return name;
    }

    public void setName(BioName name) {
        this.name = name;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    /** @return the live keyword set (mutations are visible to this object). */
    public Set<String> getKeywords() {
        return keywords;
    }

    /** Appends a single keyword, preserving insertion order. */
    public void addKeyword(String keyword) {
        keywords.add(keyword);
    }

    /** @return the live set of external identifiers. */
    public Set<BioExternalIdentifier> getBioExternalIdentifiers() {
        return bioExternalIdentifiers;
    }

    /** Appends one external identifier, preserving insertion order. */
    public void addExternalIdentifier(BioExternalIdentifier externalReference) {
        bioExternalIdentifiers.add(externalReference);
    }

    /** @return the live set of researcher URLs. */
    public Set<BioResearcherUrl> getResearcherUrls() {
        return researcherUrls;
    }

    /** Appends one researcher URL, preserving insertion order. */
    public void addResearcherUrl(BioResearcherUrl researcherUrl) {
        researcherUrls.add(researcherUrl);
    }

    public String getBiography() {
        return biography;
    }

    public void setBiography(String biography) {
        this.biography = biography;
    }

    @Override
    public String toString() {
        return "Bio{" +
            "orcid='" + orcid + '\'' +
            ", name=" + name +
            ", country='" + country + '\'' +
            ", keywords=" + keywords +
            ", bioExternalIdentifiers=" + bioExternalIdentifiers +
            ", researcherUrls=" + researcherUrls +
            ", biography='" + biography + '\'' +
            '}';
    }
}

View File

@@ -1,108 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioExternalIdentifier {
protected String id_orcid;
protected String id_common_name;
protected String id_reference;
protected String id_url;
public BioExternalIdentifier(String id_orcid, String id_common_name, String id_reference, String id_url) {
this.id_orcid = id_orcid;
this.id_common_name = id_common_name;
this.id_reference = id_reference;
this.id_url = id_url;
}
public String getId_orcid() {
return id_orcid;
}
public void setId_orcid(String id_orcid) {
this.id_orcid = id_orcid;
}
public String getId_common_name() {
return id_common_name;
}
public void setId_common_name(String id_common_name) {
this.id_common_name = id_common_name;
}
public String getId_reference() {
return id_reference;
}
public void setId_reference(String id_reference) {
this.id_reference = id_reference;
}
public String getId_url() {
return id_url;
}
public void setId_url(String id_url) {
this.id_url = id_url;
}
@Override
public String toString() {
return "BioExternalIdentifier{" +
"id_orcid='" + id_orcid + '\'' +
", id_common_name='" + id_common_name + '\'' +
", id_reference='" + id_reference + '\'' +
", id_url='" + id_url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioExternalIdentifier that = (BioExternalIdentifier) o;
if (id_common_name != null ? !id_common_name.equals(that.id_common_name) : that.id_common_name != null) {
return false;
}
if (id_orcid != null ? !id_orcid.equals(that.id_orcid) : that.id_orcid != null) {
return false;
}
if (id_reference != null ? !id_reference.equals(that.id_reference) : that.id_reference != null) {
return false;
}
if (id_url != null ? !id_url.equals(that.id_url) : that.id_url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = id_orcid != null ? id_orcid.hashCode() : 0;
result = 31 * result + (id_common_name != null ? id_common_name.hashCode() : 0);
result = 31 * result + (id_reference != null ? id_reference.hashCode() : 0);
result = 31 * result + (id_url != null ? id_url.hashCode() : 0);
return result;
}
}

View File

@@ -1,114 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.ArrayList;
import java.util.List;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioName {
protected String givenNames;
protected String familyName;
protected String creditName;
protected List<String> otherNames;
BioName() {
otherNames = new ArrayList<String>();
}
BioName(String givenNames, String familyName, String creditName, List<String> otherNames) {
this.givenNames = givenNames;
this.familyName = familyName;
this.creditName = creditName;
this.otherNames = otherNames;
}
public String getGivenNames() {
return givenNames;
}
public void setGivenNames(String givenNames) {
this.givenNames = givenNames;
}
public String getFamilyName() {
return familyName;
}
public void setFamilyName(String familyName) {
this.familyName = familyName;
}
public String getCreditName() {
return creditName;
}
public void setCreditName(String creditName) {
this.creditName = creditName;
}
public List<String> getOtherNames() {
return otherNames;
}
public void setOtherNames(List<String> otherNames) {
this.otherNames = otherNames;
}
@Override
public String toString() {
return "BioName{" +
"givenNames='" + givenNames + '\'' +
", familyName='" + familyName + '\'' +
", creditName='" + creditName + '\'' +
", otherNames=" + otherNames +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioName bioName = (BioName) o;
if (creditName != null ? !creditName.equals(bioName.creditName) : bioName.creditName != null) {
return false;
}
if (familyName != null ? !familyName.equals(bioName.familyName) : bioName.familyName != null) {
return false;
}
if (givenNames != null ? !givenNames.equals(bioName.givenNames) : bioName.givenNames != null) {
return false;
}
if (otherNames != null ? !otherNames.equals(bioName.otherNames) : bioName.otherNames != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = givenNames != null ? givenNames.hashCode() : 0;
result = 31 * result + (familyName != null ? familyName.hashCode() : 0);
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
result = 31 * result + (otherNames != null ? otherNames.hashCode() : 0);
return result;
}
}

View File

@@ -1,77 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioResearcherUrl {
protected String name;
protected String url;
public BioResearcherUrl(String name, String url) {
this.name = name;
this.url = url;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
@Override
public String toString() {
return "BioResearcherUrl{" +
"name='" + name + '\'' +
", url='" + url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioResearcherUrl that = (BioResearcherUrl) o;
if (name != null ? !name.equals(that.name) : that.name != null) {
return false;
}
if (url != null ? !url.equals(that.url) : that.url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (url != null ? url.hashCode() : 0);
return result;
}
}

View File

@@ -1,49 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Citation {

    private CitationType type;
    private String citation;

    /** Builds a citation: its format (type) plus the citation text itself. */
    public Citation(CitationType type, String citation) {
        this.type = type;
        this.citation = citation;
    }

    /** @return the citation format. */
    public CitationType getType() {
        return type;
    }

    public void setType(CitationType type) {
        this.type = type;
    }

    /** @return the citation text. */
    public String getCitation() {
        return citation;
    }

    public void setCitation(String citation) {
        this.citation = citation;
    }

    @Override
    public String toString() {
        return "Citation{" +
            "type=" + type +
            ", citation='" + citation + '\'' +
            '}';
    }
}

View File

@@ -1,28 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
// Citation formats a work's citation text can be expressed in.
// Do not reorder: client code may rely on these constants' names/values.
public enum CitationType {
// Pre-formatted citation in an unspecified style.
FORMATTED_UNSPECIFIED,
// Raw BibTeX source.
BIBTEX,
// Pre-formatted in APA style.
FORMATTED_APA,
// Pre-formatted in Harvard style.
FORMATTED_HARVARD,
// Pre-formatted in IEEE style.
FORMATTED_IEEE,
// Pre-formatted in MLA style.
FORMATTED_MLA,
// Pre-formatted in Vancouver style.
FORMATTED_VANCOUVER,
// Pre-formatted in Chicago style.
FORMATTED_CHICAGO
}

View File

@@ -1,111 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Contributor {

    private String orcid;
    private String creditName;
    private String email;
    private Set<ContributorAttribute> contributorAttributes;

    /**
     * Builds a contributor record; any field may be null.
     * The attribute set is stored as-is (not copied).
     */
    public Contributor(String orcid, String creditName, String email, Set<ContributorAttribute> contributorAttributes) {
        this.orcid = orcid;
        this.creditName = creditName;
        this.email = email;
        this.contributorAttributes = contributorAttributes;
    }

    public String getOrcid() {
        return orcid;
    }

    public void setOrcid(String orcid) {
        this.orcid = orcid;
    }

    public String getCreditName() {
        return creditName;
    }

    public void setCreditName(String creditName) {
        this.creditName = creditName;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public Set<ContributorAttribute> getContributorAttributes() {
        return contributorAttributes;
    }

    public void setContributorAttributes(Set<ContributorAttribute> contributorAttributes) {
        this.contributorAttributes = contributorAttributes;
    }

    @Override
    public String toString() {
        return "Contributor{" +
            "orcid='" + orcid + '\'' +
            ", creditName='" + creditName + '\'' +
            ", email='" + email + '\'' +
            ", contributorAttributes=" + contributorAttributes +
            '}';
    }

    /**
     * Two contributors are equal when all four fields match (null-safe).
     */
    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        Contributor other = (Contributor) o;
        return sameValue(orcid, other.orcid)
            && sameValue(creditName, other.creditName)
            && sameValue(email, other.email)
            && sameValue(contributorAttributes, other.contributorAttributes);
    }

    // Null-safe field comparison used by equals().
    private static boolean sameValue(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        // 31-multiplier combination over the same fields equals() uses.
        int h = orcid != null ? orcid.hashCode() : 0;
        h = 31 * h + (creditName != null ? creditName.hashCode() : 0);
        h = 31 * h + (email != null ? email.hashCode() : 0);
        h = 31 * h + (contributorAttributes != null ? contributorAttributes.hashCode() : 0);
        return h;
    }
}

View File

@@ -1,78 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class ContributorAttribute {

    private ContributorAttributeRole role;
    private ContributorAttributeSequence sequence;

    /** Builds a (role, sequence) attribute pair; either may be null. */
    public ContributorAttribute(ContributorAttributeRole role, ContributorAttributeSequence sequence) {
        this.role = role;
        this.sequence = sequence;
    }

    public ContributorAttributeRole getRole() {
        return role;
    }

    public void setRole(ContributorAttributeRole role) {
        this.role = role;
    }

    public ContributorAttributeSequence getSequence() {
        return sequence;
    }

    public void setSequence(ContributorAttributeSequence sequence) {
        this.sequence = sequence;
    }

    @Override
    public String toString() {
        return "ContributorAttribute{" +
            "role=" + role +
            ", sequence=" + sequence +
            '}';
    }

    /**
     * Two attributes are equal when role and sequence match.
     * Enum constants are singletons, so identity comparison is exact (and null-safe).
     */
    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        ContributorAttribute other = (ContributorAttribute) o;
        return role == other.role && sequence == other.sequence;
    }

    @Override
    public int hashCode() {
        // 31-multiplier combination over the same fields equals() uses.
        int h = role != null ? role.hashCode() : 0;
        return 31 * h + (sequence != null ? sequence.hashCode() : 0);
    }
}

View File

@@ -1,32 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
// Roles a contributor can hold on a work; the vocabulary follows the ORCID
// contributor model (see the support.orcid.org link in the class javadoc).
// Do not reorder: client code may rely on these constants' names/values.
public enum ContributorAttributeRole {
AUTHOR,
ASSIGNEE,
EDITOR,
CHAIR_OR_TRANSLATOR,
CO_INVESTIGATOR,
CO_INVENTOR,
GRADUATE_STUDENT,
OTHER_INVENTOR,
PRINCIPAL_INVESTIGATOR,
POSTDOCTORAL_RESEARCHER,
SUPPORT_STAFF
}

View File

@@ -1,23 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
// Position of a contributor in a work's author list, per the ORCID contributor
// model (see the support.orcid.org link in the class javadoc):
// FIRST is the lead contributor, ADDITIONAL any subsequent one.
public enum ContributorAttributeSequence {
FIRST,
ADDITIONAL
}

View File

@@ -1,116 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Work {

    // Descriptive metadata of a single ORCID work entry; all fields start null
    // and are populated through their setters.
    private WorkTitle workTitle;
    private String description;
    private Citation citation;
    private WorkType workType;
    private String publicationDate;
    private WorkExternalIdentifier workExternalIdentifier;
    private String url;
    private Set<Contributor> contributors;
    private String workSource;

    /** @return the title bundle of this work (may be null). */
    public WorkTitle getWorkTitle() {
        return workTitle;
    }

    public void setWorkTitle(WorkTitle workTitle) {
        this.workTitle = workTitle;
    }

    /** @return the free-text description (may be null). */
    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /** @return the citation attached to this work (may be null). */
    public Citation getCitation() {
        return citation;
    }

    public void setCitation(Citation citation) {
        this.citation = citation;
    }

    /** @return the work's type classification (may be null). */
    public WorkType getWorkType() {
        return workType;
    }

    public void setWorkType(WorkType workType) {
        this.workType = workType;
    }

    /** @return the publication date as a plain string (may be null). */
    public String getPublicationDate() {
        return publicationDate;
    }

    public void setPublicationDate(String publicationDate) {
        this.publicationDate = publicationDate;
    }

    /** @return the external identifier of this work (may be null). */
    public WorkExternalIdentifier getWorkExternalIdentifier() {
        return workExternalIdentifier;
    }

    public void setWorkExternalIdentifier(WorkExternalIdentifier workExternalIdentifier) {
        this.workExternalIdentifier = workExternalIdentifier;
    }

    /** @return the work's URL (may be null). */
    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    /** @return the contributor set; stored as-is, not copied (may be null). */
    public Set<Contributor> getContributors() {
        return contributors;
    }

    public void setContributors(Set<Contributor> contributors) {
        this.contributors = contributors;
    }

    /** @return the source that supplied this work record (may be null). */
    public String getWorkSource() {
        return workSource;
    }

    public void setWorkSource(String workSource) {
        this.workSource = workSource;
    }

    @Override
    public String toString() {
        return "Work{" +
            "workTitle=" + workTitle +
            ", description='" + description + '\'' +
            ", citation=" + citation +
            ", workType=" + workType +
            ", publicationDate='" + publicationDate + '\'' +
            ", workExternalIdentifier=" + workExternalIdentifier +
            ", url='" + url + '\'' +
            ", contributors=" + contributors +
            ", workSource='" + workSource + '\'' +
            '}';
    }
}

View File

@@ -1,73 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * An external identifier (DOI, ISBN, PMID, ...) attached to an ORCID work:
 * the identifier scheme plus the identifier value.
 *
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class WorkExternalIdentifier {

    /** The identifier scheme (DOI, ISBN, ...). */
    private WorkExternalIdentifierType workExternalIdentifierType;
    /**
     * The identifier value itself.
     * NOTE: the historical misspelling ("Idenfitier") is kept so that the
     * constructor parameter and toString() output remain unchanged.
     */
    private String workExternalIdenfitierID;

    /**
     * @param workExternalIdentifierType the identifier scheme
     * @param workExternalIdenfitierID   the identifier value
     */
    public WorkExternalIdentifier(WorkExternalIdentifierType workExternalIdentifierType,
                                  String workExternalIdenfitierID) {
        this.workExternalIdentifierType = workExternalIdentifierType;
        this.workExternalIdenfitierID = workExternalIdenfitierID;
    }

    /** @return the identifier scheme of this external identifier */
    public WorkExternalIdentifierType getWorkExternalIdentifierType() {
        return workExternalIdentifierType;
    }

    public void setWorkExternalIdentifierType(WorkExternalIdentifierType workExternalIdentifierType) {
        this.workExternalIdentifierType = workExternalIdentifierType;
    }

    /**
     * Returns the identifier value. Added for consistency: the type field had
     * accessors but the value set in the constructor was unreadable.
     *
     * @return the identifier value, may be null
     */
    public String getWorkExternalIdentifierID() {
        return workExternalIdenfitierID;
    }

    @Override
    public String toString() {
        return "WorkExternalIdentifier{" +
            "workExternalIdentifierType=" + workExternalIdentifierType +
            ", workExternalIdenfitierID='" + workExternalIdenfitierID + '\'' +
            '}';
    }

    /**
     * Two identifiers are equal when both the scheme and the value match
     * (null-safe on the value).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        WorkExternalIdentifier that = (WorkExternalIdentifier) o;
        if (workExternalIdenfitierID != null ? !workExternalIdenfitierID
            .equals(that.workExternalIdenfitierID) : that.workExternalIdenfitierID != null) {
            return false;
        }
        if (workExternalIdentifierType != that.workExternalIdentifierType) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        // Combines both fields; consistent with equals().
        int result = workExternalIdentifierType != null ? workExternalIdentifierType.hashCode() : 0;
        result = 31 * result + (workExternalIdenfitierID != null ? workExternalIdenfitierID.hashCode() : 0);
        return result;
    }
}

View File

@@ -1,42 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * Identifier schemes that may be attached to an ORCID work as an external
 * identifier.
 *
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum WorkExternalIdentifierType {
    // OTHER_ID,
    /** arXiv preprint identifier. */
    ARXIV,
    /** Amazon Standard Identification Number. */
    ASIN,
    /** ASIN qualified by an Amazon top-level domain. */
    ASIN_TLD,
    /** Astrophysics Data System bibcode. */
    BIBCODE,
    /** Digital Object Identifier. */
    DOI,
    /** Scopus electronic identifier. */
    EID,
    /** International Standard Book Number. */
    ISBN,
    /** International Standard Serial Number. */
    ISSN,
    /** Jahrbuch ueber die Fortschritte der Mathematik identifier. */
    JFM,
    /** JSTOR stable identifier. */
    JSTOR,
    /** Library of Congress Control Number. */
    LCCN,
    /** Mathematical Reviews identifier. */
    MR,
    /** OCLC / WorldCat control number. */
    OCLC,
    /** Open Library identifier. */
    OL,
    /** U.S. Office of Scientific and Technical Information identifier. */
    OSTI,
    /** PubMed Central identifier. */
    PMC,
    /** PubMed identifier. */
    PMID,
    /** IETF Request for Comments number. */
    RFC,
    /** Social Science Research Network identifier. */
    SSRN,
    /** zbMATH (Zentralblatt MATH) identifier. */
    ZBL
}

View File

@@ -1,64 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Map;
/**
 * The title of an ORCID work: a main title, an optional subtitle and an
 * optional map of translated titles keyed by language code.
 *
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class WorkTitle {

    private String title;
    private String subtitle;
    /** Translated titles keyed by language code; may be null until first use. */
    private Map<String, String> translatedTitles;

    /**
     * @param title            the main title
     * @param subtitle         the subtitle, may be null
     * @param translatedTitles translated titles keyed by language code, may be null
     */
    public WorkTitle(String title, String subtitle, Map<String, String> translatedTitles) {
        this.title = title;
        this.subtitle = subtitle;
        this.translatedTitles = translatedTitles;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getSubtitle() {
        return subtitle;
    }

    public void setSubtitle(String subtitle) {
        this.subtitle = subtitle;
    }

    /**
     * Looks up the translated title for a language.
     * Null-safe: the constructor accepts a null map, which previously made
     * this method throw a NullPointerException.
     *
     * @param languageCode the language code to look up
     * @return the translated title, or null if none is known
     */
    public String getTranslatedTitles(String languageCode) {
        return translatedTitles == null ? null : translatedTitles.get(languageCode);
    }

    /**
     * Adds or replaces the translated title for a language.
     * Lazily creates the backing map so that instances constructed with a
     * null map no longer throw a NullPointerException here.
     *
     * @param languageCode    the language code
     * @param translatedTitle the title in that language
     */
    public void setTranslatedTitle(String languageCode, String translatedTitle) {
        if (translatedTitles == null) {
            translatedTitles = new java.util.HashMap<>();
        }
        translatedTitles.put(languageCode, translatedTitle);
    }

    @Override
    public String toString() {
        return "WorkTitle{" +
            "title='" + title + '\'' +
            ", subtitle='" + subtitle + '\'' +
            ", translatedTitles=" + translatedTitles +
            '}';
    }
}

View File

@@ -1,57 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * The controlled vocabulary of work types a record can carry in an ORCID
 * work message.
 *
 * http://support.orcid.org/knowledgebase/articles/118795
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum WorkType {
    // Publications.
    BOOK,
    BOOK_CHAPTER,
    BOOK_REVIEW,
    DICTIONARY_ENTRY,
    DISSERTATION,
    ENCYCLOPEDIA_ARTICLE,
    EDITED_BOOK,
    JOURNAL_ARTICLE,
    JOURNAL_ISSUE,
    MAGAZINE_ARTICLE,
    MANUAL,
    ONLINE_RESOURCE,
    NEWSLETTER_ARTICLE,
    NEWSPAPER_ARTICLE,
    REPORT,
    RESEARCH_TOOL,
    SUPERVISED_STUDENT_PUBLICATION,
    TEST,
    TRANSLATION,
    WEBSITE,
    // Conference contributions.
    CONFERENCE_ABSTRACT,
    CONFERENCE_PAPER,
    CONFERENCE_POSTER,
    // Intellectual property.
    DISCLOSURE,
    LICENSE,
    PATENT,
    REGISTERED_COPYRIGHT,
    // Other research outputs.
    ARTISTIC_PERFORMANCE,
    DATA_SET,
    INVENTION,
    LECTURE_SPEECH,
    RESEARCH_TECHNIQUE,
    SPIN_OFF_COMPANY,
    STANDARDS_AND_POLICY,
    TECHNICAL_STANDARD,
    /** Fallback for anything not covered above. */
    OTHER
}

View File

@@ -7,8 +7,15 @@
*/
package org.dspace.authority.orcid.xml;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import java.io.InputStream;
import java.net.URISyntaxException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import org.apache.logging.log4j.Logger;
import org.xml.sax.SAXException;
/**
* @param <T> type
@@ -22,13 +29,17 @@ public abstract class Converter<T> {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(Converter.class);
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Converter.class);
public abstract T convert(InputStream document);
protected void processError(Document xml) {
String errorMessage = XMLErrors.getErrorMessage(xml);
log.error("The orcid-message reports an error: " + errorMessage);
protected Object unmarshall(InputStream input, Class<?> type) throws SAXException, URISyntaxException {
try {
JAXBContext context = JAXBContext.newInstance(type);
Unmarshaller unmarshaller = context.createUnmarshaller();
return unmarshaller.unmarshal(input);
} catch (JAXBException e) {
throw new RuntimeException("Unable to unmarshall orcid message" + e);
}
}
public abstract T convert(Document document);
}

View File

@@ -1,77 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.xml;
import javax.xml.xpath.XPathExpressionException;
import org.apache.log4j.Logger;
import org.dspace.authority.util.XMLUtils;
import org.w3c.dom.Document;
/**
 * Utility for inspecting an orcid-message XML document for an error
 * description element.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class XMLErrors {

    /**
     * log4j logger
     */
    private static Logger log = Logger.getLogger(XMLErrors.class);

    // XPath to the error description element in an orcid-message document.
    private static final String ERROR_DESC = "/orcid-message/error-desc";

    /**
     * Default constructor
     */
    private XMLErrors() { }

    /**
     * Checks a given xml document for an error description element.
     *
     * @param xml The given xml document
     * @return true if the document is null or if no error description
     *         ("/orcid-message/error-desc") text is present; false when an
     *         error description was found.
     *         NOTE(review): returning true for both "null document" and
     *         "no error present" looks inconsistent (the previous javadoc
     *         claimed true meant "contains errors") — confirm the intended
     *         semantics against callers before changing behavior.
     */
    public static boolean check(Document xml) {
        if (xml == null) {
            return true;
        }

        String textContent = null;

        try {
            textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
        } catch (XPathExpressionException e) {
            // Best-effort: an XPath failure is logged and treated the same
            // as "no error text found" (textContent stays null).
            log.error("Error while checking for errors in orcid message", e);
        }

        return textContent == null;
    }

    /**
     * Extracts the error description text from a given xml document.
     *
     * @param xml The given xml document
     * @return the text of "/orcid-message/error-desc", null if the element is
     *         absent or the XPath lookup failed, or a fixed message when the
     *         document itself is null
     */
    public static String getErrorMessage(Document xml) {
        if (xml == null) {
            return "Did not receive an XML document.";
        }

        String textContent = null;

        try {
            textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
        } catch (XPathExpressionException e) {
            // Best-effort: failure to evaluate the XPath yields null.
            log.error("Error while checking for errors in orcid message", e);
        }

        return textContent;
    }
}

Some files were not shown because too many files have changed in this diff. Show More