Compare commits

...

489 Commits

Author SHA1 Message Date
Kim Shepherd
813800ce17 [maven-release-plugin] prepare release dspace-6.3 2018-06-26 17:58:16 +12:00
Tim Donohue
a91cde6cbc Merge pull request #2099 from kshepherd/DS-3936_bower_registry_needs_updating
DS-3936 bower registry needs updating (blocker)
2018-06-25 13:48:44 -05:00
Kim Shepherd
1cd1ccc2ab fix typo in json bowerrc 2018-06-24 23:08:16 +00:00
kshepherd
b5f43835b4 Merge pull request #2097 from kshepherd/dspace-6_x
Regenerated LICENSES_THIRD_PARTY file with a few manual tidy-ups
2018-06-25 11:00:21 +12:00
Kim Shepherd
558b36c11d update bower registry to https://registry.bower.io as per official instructions 2018-06-24 22:53:55 +00:00
Kim Shepherd
5791a5cec0 manually switch pgsql jdbc driver from BSD-2-Clause to PostgreSQL License to match past license descriptions / be more descriptive (though it is bsd-2-clause under the hood, according to github) 2018-06-24 22:31:19 +00:00
Kim Shepherd
c0072f19e4 Updated third party licenses in preparation for 6.3 release 2018-06-24 22:23:31 +00:00
kshepherd
6433ce04c4 Merge pull request #2096 from kshepherd/dspace-6_x
Update copyright year range in LICENSE file in preparation for 6.3 release
2018-06-25 10:06:26 +12:00
Kim Shepherd
9353b0a42c Update copyright year range in LICENSE file in preparation for 6.3 release 2018-06-24 22:01:52 +00:00
Tim Donohue
9f80c48e15 Merge pull request #2095 from ItsNotYou/dspace-6_x
DS-3933 Updated Pubmed endpoints from http:// to https://.
2018-06-21 10:29:36 -05:00
Hendrik Geßner
4fbf330de9 DS-3933 Updated Pubmed endpoints from http:// to https://. 2018-06-21 15:22:00 +02:00
kshepherd
adfaae33fd Merge pull request #2082 from mwoodiupui/DS-3795-6x
[DS-3795] Manage versions of some buggy transitive dependencies.
2018-06-21 12:37:48 +12:00
kshepherd
b259dbecc6 Merge pull request #1999 from atmire/DS-3870-S3-bitstore-leaves-connections-to-AWS-open
DS-3870 S3 bitstore leaves connections to AWS open
2018-06-21 12:27:10 +12:00
kshepherd
da0c6e6bda Merge pull request #2080 from AndrewZWood/DS-3930
DS-3930 Remove extra space
2018-06-21 08:18:16 +12:00
kshepherd
5aaebc3b73 Merge pull request #2079 from tdonohue/reapply-DS-3710
DS-3710 Fix ehcache config conflict (Reapply to 6.x)
2018-06-21 08:08:04 +12:00
Mark H. Wood
ade12e54d9 [DS-3795] Manage versions of some buggy transitive dependencies. 2018-06-15 17:28:39 -04:00
Chris Herron
d9f4aa6da6 DS-3710 Fix ehcache config conflict 2018-06-13 16:04:17 +00:00
kshepherd
ff6252d660 Merge pull request #1910 from wgresshoff/DS-3310
Fix authentication problem in SwordV2 implementation (DS-3310).
2018-06-13 11:28:12 +12:00
Andrew Wood
dd61128b14 DS-3930 Remove extra space 2018-06-08 14:52:45 -04:00
kshepherd
c01823119d Merge pull request #1973 from antzsch/dspace-6_x
DS-3856 - foreignkey-constraint community2community_child_comm_id_fkey
2018-06-01 11:13:48 +12:00
Mark H. Wood
02d348064f Merge pull request #2069 from tdonohue/DS-3498-disable-full-text-snippets
DS-3498 quick fix. Disable full text snippets in search results & add warning
2018-05-30 16:03:00 -04:00
Tim Donohue
06268855ab DS-3498 quick fix. Disable full text snippets in search results & provide warning. 2018-05-22 21:07:26 +00:00
kshepherd
a9d83925cd Merge pull request #2068 from kshepherd/DS-3840
Fixes for DS-3840
2018-05-22 08:40:10 +12:00
kshepherd
7829eca276 Merge pull request #2067 from kshepherd/DS-3866
Fixes for DS-3866
2018-05-22 08:39:33 +12:00
Kim Shepherd
df244882ef Fixes for DS-3480 2018-05-21 11:18:26 +12:00
Kim Shepherd
22baf1e698 fixes for DS-3866 2018-05-21 11:08:23 +12:00
kshepherd
7b2050e1de Merge pull request #1730 from atmire/DS-2675-Browse-by-jump-to-bugfixes-DSpace6
DS-2675: Bugfixing: Jump to value general errors with order (for 6.x)
2018-05-20 14:27:55 +12:00
kshepherd
815568d926 Merge pull request #1770 from edusperoni/controlled-vocabulary-search6
[DS-3616] Fix nested vocabulary search (dspace-6_x)
2018-05-20 14:06:42 +12:00
kshepherd
e7e249709c Merge pull request #1779 from AlexanderS/remove-submission-policies
DS-3522: Ensure Submission Policies are removed in XMLWorkflow
2018-05-20 12:02:36 +12:00
kshepherd
76d3a7e4a1 Merge pull request #1864 from ssolim/DS-3629---Listing-of-all-Groups-misses-pagination
DS-3629 Listing of all groups misses pagination - XMLUI
2018-05-18 21:24:53 +12:00
kshepherd
9193e9a412 Merge pull request #1835 from AlexanderS/authority-indexer
DS-3681: Refactoring of DSpaceAuthorityIndexer
2018-05-17 11:48:42 +12:00
kshepherd
d65babd069 Merge pull request #2026 from KingKrimmson/DS-3894-6_x
[DS-3894] Fix "No enum constant" exception for facet "View More" links
2018-05-17 11:28:48 +12:00
Tim Donohue
4f453430ec Merge pull request #1755 from AlexanderS/jspui-fix-authority-lookup
DS-3404: JSPUI: Fix authority lookup
2018-05-16 11:01:31 -05:00
kshepherd
513533e3e2 Merge pull request #2039 from tdonohue/DS-3447-ORCID-v2-integration-slim
DS-3447 : ORCID v2 integration (using DSpace/orcid-jaxb-api)
2018-05-14 09:50:08 +12:00
kshepherd
5d6282ebfb Merge pull request #2045 from kshepherd/ds-3377_solr_queries_too_long
[DS-3377] Solr queries too long  (change search GET requests to POST)
2018-05-11 12:34:37 +12:00
Tim Donohue
72d9712555 Update POM with newly released version of orcid-jaxb-api 2018-05-10 20:15:36 +00:00
kshepherd
68b6314e8d Merge pull request #1891 from tuub/DS-3769
DS-3769 Set the right hibernate property of …
2018-05-10 09:08:58 +12:00
Pascal-Nicolas Becker
8e0b2c0bbb Merge pull request #2038 from tuub/DS-3507
DS-3507: Fixes the issues with the colon and Solr special characters …
2018-05-09 23:04:03 +02:00
kshepherd
d032784e59 Merge pull request #1890 from tuub/DS-3768
DS-3768 Fixes the harvest solr parse error by
2018-05-10 09:03:41 +12:00
kshepherd
74ab77f1e9 Merge pull request #1838 from eDissPlus/dspace-6_x
DS-3693: Add plugin to index filenames and file descriptions for files in ORIGINAL bundle
2018-05-09 14:39:14 +12:00
Kim Shepherd
9b981d963c [DS-3377] Fix some newline/spacing issues, add log warning for non-string solr parameters encountered 2018-05-09 11:24:41 +12:00
Kim Shepherd
3d2b5bc03a [DS-3377] Ensure post parameters are added to http request (fixes bug found during review) 2018-05-06 11:41:29 +12:00
Terry Brady
b51043ed84 Merge pull request #2049 from Georgetown-University-Libraries/ds3903
At +2. Merging... [DS-3903] Resolve Jackson Dependency Issues in 6x REST API
2018-05-04 16:09:46 -04:00
Terry W Brady
79ca53954d upgrade jackson and comment 2018-05-04 11:52:10 -07:00
Terry W Brady
cfb758d8ee update version for Snyk issues 2018-05-04 11:35:56 -07:00
Terry W Brady
f5306597f4 jackson version issues 2018-05-04 11:01:49 -07:00
Tim Donohue
b8744f3465 Bug fixes to querying for ORCIDs by name 2018-05-04 16:28:59 +00:00
kshepherd
e1b86828b4 Merge pull request #2000 from minurmin/DS-2862
DS-2862 Fix legacy embargo checks on DSpace 6
2018-05-03 18:44:55 +12:00
kshepherd
34a6625e6b Merge pull request #2012 from KingKrimmson/DS-3877-6_x
DS-3877 Trim bitstream name during filter-media comparison
2018-05-03 17:49:23 +12:00
Kim Shepherd
237e06a32e [DS-3377] Explicitly set SolrRequest.METHOD.POST for all regular calls to SolrQuery.query. Removed some debug logging. 2018-05-03 11:26:12 +12:00
Kim Shepherd
2d37722357 [DS-3377] Replace GET with POST in simple 'find dspace object' as well 2018-05-03 10:54:42 +12:00
Kim Shepherd
87300f4108 [DS-3377] Replace GET with POST in simple 'find dspace object' as well 2018-05-03 10:52:29 +12:00
Kim Shepherd
08bf1d8ece [DS-3377] Fixed missing paren in debug logging 2018-05-03 10:43:41 +12:00
Kim Shepherd
c21b2b9899 [DS-3377] Change http client method in SolrServiceImpl to POST instead of GET 2018-05-03 10:40:31 +12:00
Tim Donohue
8c1580bbe5 Fix default ORCID API URL. Move to correct place in dspace.cfg 2018-05-02 19:30:33 +00:00
Tim Donohue
2904680ab0 Merge pull request #1856 from helix84/DS-3705-reference-fix-pagination-6_x
DS-3705 Recent Submissions in Reference theme completely covered up by navigation (6.x)
2018-05-02 10:43:57 -05:00
kshepherd
a5638327fc Merge pull request #2043 from kshepherd/ds-3694_re-add_mirage2_war_exclusion_dspace-6
[DS-3694] Re-add the assembly exclusion for the Mirage 2 war
2018-05-02 14:33:50 +12:00
Kim Shepherd
17ea05f0d0 [DS-3694] Re-add the assembly exclusion for the Mirage 2 war (accidentally removed during port from master PR) 2018-05-02 11:17:29 +12:00
Tim Donohue
bdd252acc5 Remove unnecessary org.orcid.* classes (accidentally included in a previous commit) 2018-04-30 15:45:17 +00:00
Tim Donohue
51ade53ef3 Update/Refactor to use DSpace/orcid-jaxb-api (currently SNAPSHOT version) 2018-04-27 21:28:57 +00:00
Jonas Van Goolen
0cd69ef817 DS-3447 Removal of incorrectly copied <p> tag + re-add accidentally removed license header 2018-04-27 20:31:39 +00:00
Jonas Van Goolen
e768986e37 DS-3447 Removal of more unused ORCID v1 classes + commenting in new classes 2018-04-27 20:31:08 +00:00
Jonas Van Goolen
a3368dc779 DS-3447 Additional licences + Default orcid.api.url 2018-04-27 20:30:43 +00:00
Jonas Van Goolen
8f13958c2a DS-3447 Orcid v2 implementation + removal of obsolete v1 implementation 2018-04-27 20:30:28 +00:00
Marsa Haoua
a47844ef89 DS-3507: Fixes the issues with the colon and Solr special characters at the end of a searching string by escaping them. 2018-04-26 14:33:30 +02:00
kshepherd
4e10c27e84 Merge pull request #2033 from kshepherd/DS-3300_coverpage_size_dspace6x_port
DS-3300: Add option to select citation page format (LETTER or A4) - dspace-6_x port
2018-04-26 09:21:53 +12:00
kshepherd
5f39765960 Merge pull request #1913 from tuub/DS-3556
[DS-3556] Rollback of Xalan from 2.7.2 to 2.7.0 to fix DS-3556 and DS…
2018-04-26 07:49:59 +12:00
kshepherd
a8e2ff8c07 Merge pull request #1877 from christian-scheible/DS-3734
DS-3734 Fixed missing trigger of item last modified date when adding a bitstream
2018-04-26 07:39:37 +12:00
Ilja Sidoroff
9d0d58ab3f Add option to select citation page format (LETTER or A4) 2018-04-23 23:16:58 +00:00
Tim Donohue
d8e80e20c8 Merge pull request #2017 from kshepherd/ds-3788_dspace-6_x_port
drop indexes, update, recreate
2018-04-19 08:44:55 -05:00
Terry Brady
944e030bd4 Merge pull request #1996 from AndrewBennet/DS-3511
DS-3511: Fix HTTP 500 errors on REST API bitstream updates
2018-04-18 20:09:36 -04:00
kshepherd
ec307a1c78 Merge pull request #2003 from mwoodiupui/DS-3832-v6
[DS-3832] GeoIP-API(com.maxmind.geoip:geoip-api) needs to be replaced by GeoIP2 ( com.maxmind.geoip2:geoip2 )
2018-04-19 09:33:00 +12:00
Chris Herron
ed24a9917e Fix "No enum constant" exception for facet "View More" links 2018-04-18 11:14:46 -04:00
marsaoua
746401bfe5 DS-3702 & DS-3703: Restore the old bitstream behavior during versioning in DSpace 6. Bitstreams point to the same file on disk and reuse the internal id of the predecessor item's bitstreams as long as the new bitstreams do not differ from them. Bitstream metadata is duplicated only for the new version of the item. (#1883) 2018-04-17 14:57:53 -05:00
Mark H. Wood
0835822359 [DS-3832] Don't spew stack traces for simple exceptions. 2018-04-17 10:20:05 -04:00
Miika Nurminen
c549d3abca DS-2862 Remove the call to legacy lifter.liftEmbargo, show legacy embargo check warnings only if the policy has no date
Even if the legacy embargo is used, lifting should be based on policies with dates, so the liftEmbargo call is not needed after all. If the policy has a date, it is assumed to determine the embargo, even if the corresponding metadata field is set differently.
2018-04-16 09:33:53 +03:00
kshepherd
26406ec73e Merge pull request #1941 from AlexanderS/fix-xslt-ingestion-crosswalk
[DS-3822] Don't guess XML structure during ingest
2018-04-15 17:51:12 +12:00
kshepherd
ea5d27c65f Merge pull request #1867 from philip-muench/oai-embargo-fix
DS-3707, DS-3715: Fixes to item level embargo/privacy in OAI-PMH
2018-04-15 17:34:56 +12:00
kshepherd
fef9550684 Merge pull request #1914 from atmire/DS-3800
DS-3800 The metadata.hide configuration property does not take into account the boolean value assigned to it.
2018-04-15 12:34:05 +12:00
kshepherd
9edb231be1 Merge pull request #1805 from atmire/DS-3560_6.x
DS-3560 MathJax CDN provider change
2018-04-15 12:07:23 +12:00
Per Broman
8aaa4695b1 [DS-3770] always uncache item after performed curation task for better performance (#1892) 2018-04-11 22:27:26 -05:00
Jacob Brown
70db7006ed drop indexes, update, recreate 2018-04-12 10:18:56 +12:00
kshepherd
da2369229d Merge pull request #1885 from AlexanderS/DS-3756_Submission-Back
DS-3756: Fix "back" on last page of submission
2018-04-12 09:22:50 +12:00
kshepherd
fadb48eb54 Merge pull request #1949 from jonas-atmire/DS-3830-Cache-issue-for-version-creation-link
DS-3830 Item retrieval fallbacks for versioning navigation
2018-04-12 08:56:10 +12:00
kshepherd
483bbd9dc2 Merge pull request #2016 from tdonohue/DS-3883-fixes
DS-3883: Speed up Item summary lists by being smarter about when we load Thumbnails/Bitstreams
2018-04-12 07:28:08 +12:00
Tim Donohue
cf2021aee1 DS-3883: If only including thumbnails, only load the main item thumbnail. 2018-04-10 13:44:24 +00:00
Terry Brady
5ebe6f4b4d Merge pull request #1972 from ssolim/DS-3858-DataCiteConnector-leads-to-org.apache.http.NoHttpResponseException
DS-3858 add HttpRequestRetryHandler to DataCiteCon -- At +2, merging
2018-04-09 14:56:30 -04:00
Tim Donohue
1a62edaefb DS-3883: Don't loop through original bitstreams if only displaying thumbnails 2018-04-09 17:39:01 +00:00
ssolim
ae1920cb43 DS-3858: HttpClientBuilder replaces deprecated
DS-3858: Use HttpClientBuilder as the other methods were deprecated. Commit from pnbecker
2018-04-09 09:42:50 +02:00
kshepherd
8b1cf7d6a4 Merge pull request #2014 from Georgetown-University-Libraries/ds3694
[DS-3694] Clean up EHCache configuration mess (Port to 6x)
2018-04-09 16:27:36 +12:00
Terry W Brady
362b81d2ac update method signature 2018-04-06 16:29:36 -07:00
Terry W Brady
81bcaa47c4 port of PR1940 2018-04-06 16:20:00 -07:00
Chris Herron
b70b170657 DS-3877 Trim bitstream name during filter-media comparison (prevent duplicate bitstream generation) 2018-04-04 09:42:04 -04:00
Mark H. Wood
825b97f6f5 Merge pull request #2006 from kshepherd/pr1804_dspace6_port
Change Content-Type in OAI-Response (DSpace 6 port)
I'm going to go ahead and merge, since it's a port of a patch already merged on another branch.
2018-03-30 14:29:18 -04:00
Mark H. Wood
9aabe46c33 [DS-3832] Clean up more references to v1 database. 2018-03-29 13:27:56 -04:00
Mark H. Wood
9757989336 [DS-3832] Fetch and use GeoLite v2 City database. 2018-03-29 12:46:20 -04:00
Terry Brady
c4aab55e7c Merge pull request #1977 from TAMULib/DS3775-hibernate-session-bug
[DS-3775] Hibernate session bug when submitting item with Versioning enabled
2018-03-28 15:51:30 -07:00
Saiful Amin
b0a1fb7384 Change Content-Type in OAI-Response
As per OAI 2.0 spec (3.1.2.1) the response content type *must* be text/xml.
http://www.openarchives.org/OAI/openarchivesprotocol.html#HTTPResponseFormat

Our OAI client is rejecting the response from DSpace OAI server.
2018-03-28 22:15:41 +00:00
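A minimal sketch of the header change this commit describes, assuming a plain servlet-style response object rather than the actual DSpace XOAI code:

```java
import javax.servlet.http.HttpServletResponse;

public class OaiResponseHelper {
    /** Per OAI-PMH 2.0 section 3.1.2.1, an OAI response must be served as text/xml. */
    public static void setOaiContentType(HttpServletResponse response) {
        // Some harvesting clients reject other XML content types such as application/xml
        response.setContentType("text/xml;charset=UTF-8");
    }
}
```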
philip-muench
7587d9bd05 Removing debug output 2018-03-28 23:30:05 +02:00
kshepherd
ad4680a26c Merge pull request #1998 from dineshmendhe1/DS-3875
DS-3875: Fix the attribute name of <identifier> tag to 'identifierType'.
2018-03-29 10:15:12 +13:00
kshepherd
999b2f23be Merge pull request #1982 from philip-muench/dspace-6_x
DS-3865: HTML5 Upload Pause Button does not work
2018-03-29 10:09:26 +13:00
Mark H. Wood
f906915879 [DS-3832] Resolve dependency convergence problems. 2018-03-28 13:13:23 -04:00
Mark H. Wood
8be78e6430 [DS-3832] Fix ElasticSearch too. 2018-03-28 12:40:01 -04:00
Mark H. Wood
faf5bd1f94 [DS-3832] Recast test support classes. 2018-03-28 11:56:31 -04:00
Mark H. Wood
4ea7575e4a [DS-3832] Upgrade to GeoIP2. 2018-03-28 11:52:38 -04:00
Kim Shepherd
7bf2f36300 DS-3707, DS-3715: Final name update: removed verbose method name / test number info as per Slack discussion (we won't try to force uniqueness on JUnit assertions) 2018-03-28 10:27:50 +13:00
Kim Shepherd
f354979777 DS-3707, DS-3715: Some update to name text, appending function and test name just to ensure uniqueness (this won't look so ugly if we follow same convention for shorter method names). Line breaks added beneath while blocks. 2018-03-28 10:19:23 +13:00
Kim Shepherd
7b1010cc36 DS-3707, DS-3715: Make test assertion names more descriptive. Added an extra test case to ensure non-discoverable items are not returned by the 'discoverable' version of the find 2018-03-28 10:08:31 +13:00
Kim Shepherd
24ba583921 DS-3707, DS-3715: Add unit tests for existing findInArchiveOrWithdrawnDiscoverableModifiedSince ItemService method and new findInArchiveOrWithdrawnNonDiscoverableModifiedSince ItemService method. Tests ensure that items are returned whether withdrawn or archived, that passing the 'since' timestamp works as expected, and that we get expected results when the item is discoverable and non-discoverable 2018-03-26 10:36:17 +13:00
Miika Nurminen
1f76a54384 DS-2862 Activate legacy embargo lifter, fix conditions on embargo check warnings 2018-03-24 00:58:54 +02:00
benbosman
a3774944ad DS-3870 S3 bitstore leaves connections to AWS open 2018-03-22 16:23:41 +01:00
Dinesh Mendhe
d885ec0d4e DS-3875: Fix the attribute name of <identifier> tag to 'identifierType'.
https://jira.duraspace.org/browse/DS-3875
2018-03-21 17:58:48 -04:00
Tim Donohue
b5aba21902 Merge pull request #1969 from tdonohue/lastest_postgres_jdbx_6x
DS-3854: Update to latest PostgreSQL JDBC driver
2018-03-21 13:48:42 -05:00
Miika Nurminen
17fb6cab87 DS-3511: Add context.complete() to updateBitstreamData and deleteBitstreamPolicy to prevent HTTP 500 response on REST updates 2018-03-21 13:49:16 +00:00
Terry Brady
0506d2ffe3 Merge pull request #1934 from Georgetown-University-Libraries/ds3811r6
[DS-3811] Integrate Shibboleth into DSpace REST Report Tools
2018-03-19 15:17:36 -07:00
Terry W Brady
df16cde989 define shib path at top of file 2018-03-19 14:39:41 -07:00
jsavell
5b75b0a3b9 reload dso when getting handle from HandleUtil 2018-03-19 10:30:08 -05:00
jsavell
ff2305ee51 Revert "reload dso before adding to validity"
This reverts commit b1d56059cd.
2018-03-19 09:57:17 -05:00
Terry Brady
79d94d92f9 Merge branch 'dspace-6_x' into ds3811r6 2018-03-15 14:38:07 -07:00
kshepherd
78d4fb14c1 Merge pull request #1854 from Georgetown-University-Libraries/ds3704
[DS-3704] Expand DSpace REST Reports to include bitstream fields in item listing
2018-03-16 10:16:51 +13:00
kshepherd
d6f35fbda1 Merge pull request #1862 from Georgetown-University-Libraries/ds3713
[DS-3714] REST Collection Report - Need a paginated findByCollection call that can return withdrawn items
2018-03-16 09:51:19 +13:00
kshepherd
077a6e99d6 Merge pull request #1863 from Georgetown-University-Libraries/ds3714
[DS-3713] REST Query/Collection Report - Bug Filtering for Bitstream Permissions
2018-03-15 22:18:23 +13:00
kshepherd
daee0646da Merge pull request #1845 from jrihak/shibboleth-authentication-fix
[DS-3662] DSpace 'logging in' without password or with non-existent e-mail using Shib and Password authentication
2018-03-15 11:48:47 +13:00
kshepherd
6787983574 Merge pull request #1860 from KingKrimmson/dspace-6_x
DS-3710 Fix ehcache config conflict
2018-03-15 11:44:23 +13:00
Philip Vissenaekens
7592f48064 DS-3800 2018-03-07 17:05:03 +01:00
philip-muench
a61c21f216 Update choose-file.jsp
Make pause button work again
2018-03-07 15:18:40 +01:00
jsavell
b1d56059cd reload dso before adding to validity 2018-03-02 16:33:56 -06:00
Stefan
1c17e8e475 DS-3856 - foreignkey-constraint community2community_child_comm_id_fkey
Move the deletion of the parent-child relationship to the rawDelete() method.
2018-03-01 10:15:57 +01:00
ssolim
7ed10610de use StandardHttpRequestRetryHandler instead of override 2018-02-28 16:05:36 +01:00
ssolim
7ce43d4027 DS-3858 add HttpRequestRetryHandler to DataCiteCon 2018-02-28 11:30:50 +01:00
Stefan Fritzsche
17907bf442 DS-3856 - foreignkey-constraint community2community_child_comm_id_fkey 2018-02-28 11:00:54 +01:00
Tim Donohue
1b21e0baef Update to latest PostgreSQL JDBC driver 2018-02-26 20:31:07 +00:00
Tim Donohue
7bf537e3ab Merge pull request #1966 from mwoodiupui/DS-3852-6_x
[DS-3852] OAI indexer message not helpful in locating problems
2018-02-26 09:32:49 -06:00
Mark H. Wood
78ed97c78d [DS-3852] Give more information about the item just indexed, to help identify it in case of problems.
07b050c
2018-02-24 17:39:44 -05:00
ihausmann
fbb0b73b61 DS-3404: JSPUI: Fix authority lookup
My colleague Eike made a mistake fixing the bug. His solution only works for repeatable fields.
 
https://jira.duraspace.org/browse/DS-3404
https://github.com/DSpace/DSpace/pull/1755
2018-02-23 12:36:27 +01:00
Tom Desair (Atmire)
a33e886cf0 Merge pull request #1848 from tuub/DS-3700
DS-3700: MediaFilterServiceImpl forgot to close an input stream.
2018-02-21 16:09:36 +01:00
kshepherd
48b1ac8e18 Merge pull request #1951 from Georgetown-University-Libraries/ds3835
[DS-3835] Add js work around to preserve current scope
2018-02-20 18:23:45 +13:00
kshepherd
fb08f721c5 Merge pull request #1960 from hardyoyo/DS-3839-revised
[DS-3839] moved the autoorient IM op to the top of the operations lis…
2018-02-20 16:04:18 +13:00
Hardy Pottinger
d1edfb5f85 [DS-3839] moved the autoorient IM op to the top of the operations list, where it belongs 2018-02-19 11:17:48 -06:00
Hardy Pottinger
2c5dcf13ef [DS-3839] added op.autoOrient to ImageMagickThumbnailFilter (#1956) 2018-02-16 07:59:46 -06:00
Pascal-Nicolas Becker
2754a7bad8 Merge pull request #1946 from philip-muench/dspace-6_x
DS-3827 LazyInitializationException in formats.jsp
2018-02-15 11:48:54 +01:00
Terry W Brady
6275a59f24 add Mirage fix 2018-02-08 16:30:03 -08:00
Terry W Brady
409075e447 Add js work around to preserve current scope 2018-02-08 14:56:03 -08:00
Mark H. Wood
4cdfd34a08 Merge pull request #1925 from mwoodiupui/DS-3434
[DS-3434] DSpace fails to start when a database connection pool is supplied through JNDI
2018-02-07 10:25:48 -05:00
Jonas Van Goolen
5684f24944 DS-3830 Item retrieval fallbacks for versioning navigation 2018-02-06 08:43:29 +01:00
Mark H. Wood
2b55ec5c63 Merge pull request #1917 from alanorth/6_x-remove-jndi-dspacecfg
DS-3803 Remove db.jndi setting from dspace.cfg
Should be ported to master as well.
2018-02-05 10:53:20 -05:00
philip-muench
5ed105974b Update formats.jsp
This JSP produces an internal error due to a LazyInitializationException. The exception occurs because the DSpace context is aborted before all database queries have completed. JIRA ticket: DS-3827
2018-02-05 13:07:38 +01:00
Tim Donohue
81af7f47a7 DS-3795: Update Apache POI library to latest version 2018-02-02 16:09:37 +00:00
Alexander Sulfrian
de1e26b3ee [DS-3822] Don't guess XML structure during ingest
The XML document used during ingestion can contain multiple XML nodes directly
inside the XML root. The crosswalk should not modify the source document, but
only hand it over to the XSLT stylesheet.
2018-01-30 19:09:45 +01:00
Tim Donohue
d90e1667b6 Merge pull request #1937 from tdonohue/bump-commons-fileupload-version
[DS-3795] increased version for commons-fileupload (backport to 6.x)
2018-01-29 10:14:26 -06:00
Tim Donohue
d9c7ac61e9 Merge pull request #1938 from tdonohue/DS-3795-bump-jackson-version
[DS-3795] bumped google-http-client-jackson2 to 1.23.0
2018-01-29 10:14:05 -06:00
Hardy Pottinger
5d8e34c0c3 [DS-3795] whoops, let's pick an actual version number for google-api-services-analytics, heh 2018-01-26 22:34:21 +00:00
Hardy Pottinger
47a5898fd9 [DS-3795] bumped other Google API dependencies to 1.23.0, as per suggestion of tdonohue 2018-01-26 22:34:13 +00:00
Hardy Pottinger
b16b116f54 [DS-3795] bumped google-http-client-jackson2 to 1.23.0 2018-01-26 22:34:04 +00:00
Hardy Pottinger
9654ea87c9 increased version for commons-fileupload 2018-01-26 22:31:46 +00:00
Terry W Brady
e2975e26ed Eliminate bypass authentication checks 2018-01-25 11:31:10 -08:00
Terry W Brady
061298640b Enable shibb authentication in rest tools 2018-01-25 11:30:44 -08:00
Mark H. Wood
e2a771d10d [DS-3434] Look up a bean implementing DataSource instead of accepting any old Object. This should be common to all implementations. 2018-01-23 11:10:51 -05:00
Mark H. Wood
d37670776b [DS-3434] Make some documentation more visible. 2018-01-19 14:54:24 -05:00
Mark H. Wood
49e9e3817e [DS-3434] Look up generic object instead of a specific DataSource subclass. 2018-01-19 14:06:58 -05:00
Mark H. Wood
e640106468 Merge pull request #1820 from mwoodiupui/DS-3667
[DS-3667] Document fundamental persistence support classes.
2018-01-18 15:32:34 -05:00
Alan Orth
3bb04dac4c DS-3803 Remove db.jndi setting from dspace.cfg
As of DSpace 6.x this setting is no longer used and is not customizable
by the user. Now DSpace always looks for a pool named "jdbc/dspace" in
JNDI and falls back to creating a pool with the db.* settings located
in dspace.cfg.

See: https://wiki.duraspace.org/display/DSDOC6x/Configuration+Reference
See: dspace/config/spring/api/core-hibernate.xml
See: https://jira.duraspace.org/browse/DS-3434
2018-01-16 19:24:12 +02:00
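An illustrative sketch of the lookup order described above (the real wiring lives in dspace/config/spring/api/core-hibernate.xml; this is not the DSpace code itself): try the JNDI pool named "jdbc/dspace" first, then fall back to a pool built from the db.* settings.

```java
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;

public class DataSourceResolverSketch {
    /** Return the JNDI-provided pool if one is bound, otherwise the configured fallback pool. */
    public static DataSource resolve(DataSource fallbackFromDbSettings) {
        try {
            // Tomcat exposes resources under java:comp/env; "jdbc/dspace" is the name DSpace looks for
            return (DataSource) new InitialContext().lookup("java:comp/env/jdbc/dspace");
        } catch (NamingException e) {
            // No JNDI pool defined: use the pool created from db.url/db.username/db.password
            return fallbackFromDbSettings;
        }
    }
}
```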
Philip Vissenaekens
1ea55a4fe6 DS-3800 2018-01-10 10:18:18 +01:00
Per Broman
31a613cb1a [DS-3556] Rollback of Xalan from 2.7.2 to 2.7.0 to fix DS-3556 and DS-3733 2018-01-06 09:37:25 +01:00
gressho
bda2f8709c Fix authentication problem in SwordV2 implementation (DS-3310). 2018-01-04 10:59:35 +01:00
Hardy Pottinger
ff69c1fa8c [DS-3087] removed inlineMath setting from MathJax config in Mirage and Mirage2 page-structure.xsl files, the defaults are sensible and preferable (#1896) 2017-12-12 20:18:25 -06:00
Hardy Pottinger
8f85b764f4 [DS-3757] increase default clamav socket timeout to 6 minutes (#1886) 2017-11-27 10:24:07 -06:00
marsa
83ec310d0e DS-3769 Set the right hibernate property of org.dspace.eperson.Subscription: ePerson instead of eperson.id 2017-11-27 17:12:18 +01:00
marsa
38d951062c DS-3768 Fixes the harvest solr parse error by setting the requirements and syntaxes of the field location and lastModified properly. 2017-11-27 16:42:28 +01:00
Lotte Hofstede
14eef1b409 DS-3560: update deprecated MathJax url for 6.x 2017-11-21 11:03:58 +01:00
Mark H. Wood
10dc184824 Merge pull request #1831 from AlexanderS/fix/multiple-use-vocabulary
DS-3682: Fix reusing of the same vocabulary dialog
2017-11-20 10:23:55 -05:00
Mark H. Wood
f8244980f0 Merge pull request #1839 from Generalelektrix/dspace-6_x
[DS-3332] Handle resolver is hardcoded in org.dspace.handle.UpdateHandlePrefix
2017-11-20 10:22:02 -05:00
Martin Walk
1ba1a17c52 Empty commit to trigger Travis CI 2017-11-17 14:39:54 +01:00
Martin Walk
9ce4653ffd Add search filters
- add search filter for original_bundle_filenames and original_bundle_descriptions to discovery.xml
- add messages
2017-11-17 11:37:31 +01:00
marsaoua
1b90001420 DS-3729: Set the Bitstream deletion flag in the database in case of an item deletion (#1874) 2017-11-16 13:42:32 -06:00
Alexander Sulfrian
cf45326276 DS-3756: Fix "back" on last page of submission
The back button on the last page (the maximum reached) of the current
submission should ignore required fields. Because maxStepAndPage is a Java
object, equals() has to be used to compare instances.
2017-11-15 16:59:23 +01:00
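A minimal illustration of the comparison issue this commit fixes; StepAndPage below is a hypothetical stand-in for the submission's step/page holder, not the actual DSpace class:

```java
import java.util.Objects;

public class StepAndPage {
    private final int step;
    private final int page;

    public StepAndPage(int step, int page) {
        this.step = step;
        this.page = page;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof StepAndPage)) {
            return false;
        }
        StepAndPage other = (StepAndPage) o;
        return step == other.step && page == other.page;
    }

    @Override
    public int hashCode() {
        return Objects.hash(step, page);
    }

    public static void main(String[] args) {
        StepAndPage maxReached = new StepAndPage(3, 1);
        StepAndPage current = new StepAndPage(3, 1);
        System.out.println(maxReached == current);      // false: two distinct instances
        System.out.println(maxReached.equals(current)); // true: value comparison, as the fix requires
    }
}
```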
Mark H. Wood
4304d8a872 Merge pull request #1870 from AlexanderS/missing-readonly
DS-3724: Missing readonly in input-forms.dtd

This should be ported to DSpace 7, but note that the submission configuration is being refactored.
2017-11-01 12:11:35 -04:00
Christian Scheible
5debb078d0 DS-3734 Fixed missing trigger of item last modified date when adding a bitstream. 2017-10-27 14:06:32 +02:00
Alexander Sulfrian
2bf07661bf DS-3724: Missing readonly in input-forms.dtd 2017-10-23 15:24:19 +02:00
Terry W Brady
63e6823b62 clarify methods that return all vs archived items 2017-10-16 08:51:35 -07:00
Philip Muench
6f892e70e8 DS-3707, DS-3715: Fixes to item level embargo/privacy in OAI-PMH 2017-10-16 15:28:47 +02:00
ssolim
e5cead0063 change decodefromurl default value to normal behaviour
DS-3629
2017-10-06 09:00:26 +02:00
ssolim
dd5a277f7b Fix logical error in searchResultCount
GroupServiceImpl.searchResultCount always returned 1 when no search term was submitted, but was intended to return the count of all groups
2017-10-06 08:58:32 +02:00
Terry W Brady
e8b0a1d86b Call findAll/countAll to include withdrawn items 2017-10-05 16:22:55 -07:00
Terry W Brady
da5c795804 Add countAllItems methods 2017-10-05 16:22:11 -07:00
Terry W Brady
7f9e2d7bb0 convert bundle enum to string 2017-10-05 13:53:13 -07:00
Terry W Brady
aa0ced3d10 add pagination methods to find all 2017-10-05 13:50:48 -07:00
Chris Herron
9e6768241b DS-3710 Fix ehcache config conflict 2017-10-05 11:07:44 -04:00
Ivan Masár
c30018b089 DS-3705 Recent Submissions in Reference theme completely covered up by navigation 2017-10-04 10:50:45 +02:00
Terry W Brady
5c334351fa clean up css 2017-10-03 15:09:10 -07:00
Pascal-Nicolas Becker
23f2573460 Merge pull request #1851 from tuub/DS-3627
DS-3627: Cleanup utility leaves files in assetstore
2017-09-29 17:46:22 +02:00
Pascal-Nicolas Becker
179141dc4a DS-3700: MediaFilterServiceImpl forgot to close an input stream. 2017-09-29 16:47:58 +02:00
Terry W Brady
fd1afab6fc Merge bitstream fields into report 2017-09-28 17:22:13 -07:00
Pascal-Nicolas Becker
e59611b5c7 DS-3627: Cleanup utility leaves files in assetstore 2017-09-28 08:11:53 +02:00
Jakub Řihák
968487b9d2 [DS-3662] DSpace 'logging in' without password or with non-existent e-mail using Shib and Password authentication
Added an extra check for an empty attribute value.
If the value is empty, it must not be returned; return 'null' instead.
This prevents passing an empty value to other methods, stops the authentication
process, and prevents the creation (and subsequent authentication) of an 'empty'
DSpace EPerson when autoregister == true.
2017-09-21 17:40:13 +02:00
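A tiny sketch of the guard this commit adds, under the assumption that the attribute arrives as a request header; the method name is illustrative, not the actual ShibAuthentication API:

```java
import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang3.StringUtils;

public class ShibAttributeGuardSketch {
    /** Return the attribute value, or null when the header is missing or empty. */
    public static String findAttribute(HttpServletRequest request, String name) {
        String value = request.getHeader(name);
        // Returning null instead of "" stops the authentication process early and
        // prevents auto-registering an "empty" EPerson when autoregister == true.
        return StringUtils.isBlank(value) ? null : value.trim();
    }
}
```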
Martin Walk
a3ea6d5df8 Add expected license header 2017-09-14 09:21:47 +02:00
Generalelektrix
5948e33517 DS-3332
Centralized most references to http://hdl.handle.net/ and to handle.canonical.prefix in HandleService. Created a new method, getCanonicalPrefix(), in HandleService and adjusted getCanonicalForm(). As far as I can tell, the remaining references to http://hdl.handle.net/ should stay as they are, since they are used as default values or only appear in documentation sections.
2017-09-12 15:12:12 -04:00
Martin Walk
aa69b2220a Add plugin to index filenames and file descriptions for files in ORIGINAL bundle 2017-09-12 13:01:11 +02:00
Generalelektrix
d7a0e0f560 Merge remote-tracking branch 'upstream/dspace-6_x' into dspace-6_x 2017-09-08 11:33:06 -04:00
Mark H. Wood
fc3ea83049 [maven-release-plugin] prepare for next development iteration 2017-09-07 16:14:13 -04:00
Mark H. Wood
e5cb62997a [maven-release-plugin] prepare release dspace-6.2 2017-09-07 16:14:03 -04:00
Mark H. Wood
ff7c7e3d6d Regenerate third-party license list 2017-09-07 15:16:09 -04:00
Mark H. Wood
068047cb11 Update LICENSE copyright claim with current year. 2017-09-07 15:08:17 -04:00
Terry Brady
0e7c7c0886 Merge pull request #1836 from Generalelektrix/dspace-6_x
DS-3687
2017-09-06 12:14:19 -07:00
Generalelektrix
a814d177e5 Merge remote-tracking branch 'upstream/dspace-6_x' into dspace-6_x 2017-09-06 10:45:52 -04:00
Generalelektrix
6626901564 DS-3687
Make the key generic for the legacy note value since it is not only used in JSPUI.
2017-09-06 10:28:14 -04:00
Generalelektrix
6b8f072d3e DS-3687 Hard coded note not compatible with multi-lingual sites for legacy stats
Changed the hard-coded string to a reference to a new field in the language bundle.
2017-09-06 10:18:50 -04:00
Alexander Sulfrian
41d8668331 AuthorityIndexer: Uncache Item after getting authority values
The AuthorityIndexClient walks over all Items in the repository and generates
all missing AuthorityValues for all of them. After getting the missing values
we need to uncache the Items to free the memory.
2017-08-31 18:52:29 +02:00
Alexander Sulfrian
0082e5b7da AuthorityIndexer: Only use non-null authority values 2017-08-31 18:52:29 +02:00
Alexander Sulfrian
3cb60236d0 AuthorityIndexer: Add possibility to cache new values
Now the DSpaceAuthorityIndexer can cache authority values for specific metadata
content (without an authority key) again.
2017-08-31 18:49:49 +02:00
Alexander Sulfrian
64022a92fb AuthorityIndexer: Remove state
The DSpaceAuthorityIndexer now returns all AuthorityValues for the specified
Items at once. It iterates over all values of all metadata fields (that are
configured for authority) of the item and gets or generates the authority
values for all of them. So it can be called from various threads without the
possibility for concurrent data access.

This currently removes the cache for newly generated authority values.
2017-08-31 18:48:11 +02:00
Alexander Sulfrian
c3214d6f77 AuthorityIndexer: Only handle one item at a time
The DSpaceAuthorityIndexer should only handle one item at a time. If the caller
needs multiple items to be indexed, the Indexer should be called multiple
times.
2017-08-31 18:48:11 +02:00
Alexander Sulfrian
889499105f AuthorityIndexer: Remove unused parameter 2017-08-31 18:48:07 +02:00
Tim Donohue
fa587c52ed Merge pull request #1830 from tuub/DS-3680
DS-3680: Database changes of consumers aren't persisted anymore
2017-08-31 06:41:19 +10:00
Alexander Sulfrian
6a14047db6 vocabulary-support: Fix reusing of the same dialog
If the same vocabulary is used multiple times on the same input page, the
dialog is only fetched from the server the first time and reused afterwards.
The problem is that the target input field is contained in the dialog, so when
the dialog is reused the target field has to be updated.
2017-08-25 14:48:46 +02:00
Pascal-Nicolas Becker
d753c09b22 DS-3680: Remove problematic uncaching. Also see DS-3681 as follow-up. 2017-08-24 18:25:41 +02:00
Pascal-Nicolas Becker
fbb45ba758 DS-3680: clarify that we need to dispatch events before committing 2017-08-24 18:25:20 +02:00
Pascal-Nicolas Becker
014456e1ed Revert "Events must be dispatched after commit() to ensure they can retrieve latest data from DB"
This reverts commit 646936a3d8.
2017-08-24 18:22:58 +02:00
Terry Brady
258b4f00e9 [DS-3602] Ensure Consistent Use of Legacy Id in Usage Queries (#1782)
* ensure that owning Item,Coll,Comm use legacy consistently

* scopeId query

* refine queries

* alter id query

* Commenting the behavior of the id / legacyId search

* Address duplicate disp for DSO w legacy and uuid stats
2017-08-17 23:48:25 +10:00
Tim Donohue
3798a12778 Merge pull request #1824 from tdonohue/DS-3656_and_DS-3648
DS-3656 and DS-3648 : Fix several Hibernate caching / saving issues
2017-08-17 06:53:28 +10:00
Hardy Pottinger
bc82adef5e [DS-3674] copied over input-forms.xml to the test config folder 2017-08-15 14:43:41 -05:00
Mark H. Wood
28bbf4b930 [DS-3667] Take up PR comments and document another class. 2017-08-11 17:02:14 -04:00
Tim Donohue
d4d61eed68 Replace dispatchEvents() call with an actual commit() to ensure changes are saved 2017-08-10 21:27:34 +00:00
Tim Donohue
646936a3d8 Events must be dispatched after commit() to ensure they can retrieve latest data from DB 2017-08-10 21:27:00 +00:00
Tim Donohue
9dd6bb0f08 DS-3648: Don't uncache submitter and related groups. Also DS-3656: Flush changes before evict() 2017-08-10 21:25:38 +00:00
Terry Brady
0e2ed31deb Merge pull request #1821 from Georgetown-University-Libraries/ds3661r6x
[DS-3661] Port to 6x: ImageMagick PDF Processing Degraded with Color Space Changes
2017-08-09 13:18:55 -07:00
Terry W Brady
1492dfef92 Normalize space 2017-08-09 13:02:04 -07:00
Terry W Brady
8b6c1acab1 Port PR1817, Only request image info if color space 2017-08-09 13:01:17 -07:00
Alan Orth
e88924b7da DS-3517 Allow improved handling of CMYK PDFs
Allow ImageMagick to generate thumbnails with more accurate colors
for PDFs using the CMYK color system. This adds two options to the
dspace.cfg where the user can optionally specify paths to CMYK and
RGB color profiles if they are available on their system (they are
provided by Ghostscript 9.x).

Uses im4java's Info class to determine the color system being used
by the PDF.

See: http://im4java.sourceforge.net/docs/dev-guide.html
2017-08-09 19:45:28 +00:00
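A hedged sketch of the approach the commit describes, using im4java's Info class to detect a CMYK PDF before thumbnailing; the property name, profile paths, and output handling are illustrative assumptions, not the exact ImageMagickThumbnailFilter code:

```java
import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation;
import org.im4java.core.Info;

public class CmykAwareThumbnailSketch {
    public static IMOperation buildOperation(String pdfPath, String cmykProfile, String rgbProfile)
            throws IM4JavaException {
        IMOperation op = new IMOperation();
        op.addImage(pdfPath + "[0]");          // first page of the PDF
        Info info = new Info(pdfPath + "[0]"); // verbose identify query
        if ("CMYK".equalsIgnoreCase(info.getProperty("Colorspace"))
                && cmykProfile != null && rgbProfile != null) {
            // Convert CMYK to sRGB using the ICC profiles shipped with Ghostscript 9.x
            op.profile(cmykProfile);
            op.profile(rgbProfile);
        }
        op.thumbnail(300);
        op.addImage("thumbnail.jpg");
        return op;
    }
}
```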
Terry Brady
42608e028e Merge pull request #1816 from AlexanderS/fix-discovery-reindex
DS-3660: Fix discovery reindex on metadata change
2017-08-09 12:08:31 -07:00
Mark H. Wood
801f39daeb [DS-3667] Document fundamental persistence support classes. 2017-08-08 23:46:52 -04:00
Alexander Sulfrian
7e68165ded DS-3660: Fix discovery reindex on metadata change
Stored objects may get evicted from the session cache and get into detached
state. Lazy loaded fields are inaccessible and throw an exception on access.

Before using objects they have to be reloaded (retrieved from the
database and associated with the session again).
2017-08-03 16:25:39 +02:00
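A short sketch of the reload-before-use pattern the commit describes, using the DSpace 6 Context.reloadEntity call (treat the exact signature as an assumption here):

```java
import java.sql.SQLException;

import org.dspace.content.Item;
import org.dspace.core.Context;

public class ReindexReloadSketch {
    /** Re-associate a possibly detached Item with the current Hibernate session. */
    public static Item ensureAttached(Context context, Item possiblyDetached) throws SQLException {
        // A detached Item throws LazyInitializationException when a lazy field is touched;
        // reloading fetches it from the database and attaches it to the session again.
        return context.reloadEntity(possiblyDetached);
    }
}
```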
Tim Donohue
cfecf10e81 Merge pull request #1815 from tdonohue/DS-3659
DS-3659: Database migrate fails to create the initial groups
2017-08-03 23:51:47 +10:00
Alexander Sulfrian
5d656ea922 XMLUI: Remove doubled translation key (#1818)
The key "xmlui.ChoiceLookupTransformer.lookup" is already in line 2368 of the
same file.
2017-08-03 15:23:49 +02:00
Tim Donohue
62e2ac81fb Merge pull request #1814 from AlexanderS/fix/i18n-key-typo
XMLUI/SwordClient: Fix typo in i18n key
2017-08-02 07:33:05 +10:00
Tim Donohue
e9ace604a7 DS-3659: Ensure readonly connections can never rollback 2017-08-01 18:00:28 +00:00
Alexander Sulfrian
7f91528c1a XMLUI/SwordClient: Fix typo in i18n key 2017-07-25 15:21:10 +02:00
Tim Donohue
4881e9da20 [maven-release-plugin] prepare for next development iteration 2017-07-13 12:15:12 -05:00
Tim Donohue
eb4d56201a [maven-release-plugin] prepare release dspace-6.1 2017-07-13 12:15:02 -05:00
Tim Donohue
df9fb114ba Merge pull request #1807 from tdonohue/travis-fixes
Pin versions of SASS and Compass that Travis CI uses
2017-07-14 02:58:35 +10:00
Tim Donohue
f3556278aa Pin versions of SASS and Compass that Travis uses 2017-07-13 16:28:35 +00:00
Tim Donohue
f6af76c6d8 Revert 6.1 release 2017-07-13 14:15:21 +00:00
Tim Donohue
151a5f8fe2 [maven-release-plugin] prepare for next development iteration 2017-07-12 20:55:13 +00:00
Tim Donohue
57044f6698 [maven-release-plugin] prepare release dspace-6.1 2017-07-12 20:55:07 +00:00
Tim Donohue
4954f96f1d Merge pull request #1785 from atmire/DS-3127-DSpace-6_Whitelist-allowable-formats-Google-Scholar-citation_pdf_url
DS-3127 Whitelist allowable formats google scholar citation pdf url
2017-07-12 06:40:45 +10:00
Tim Donohue
972f76e771 Merge pull request #1790 from tomdesair/DS-3632_Correct-update-handle-prefix-script
DS-3632: Correct update-handle-prefix script
2017-07-12 06:27:08 +10:00
Tim Donohue
e30b0cdec6 DS-3431 : Fix broken tests by removing nullifying of global eperson 2017-07-11 16:13:25 +00:00
Pascal-Nicolas Becker
a0f226b763 [DS-3431] Harden DSpace's BasicWorfklowService 2017-07-11 16:10:08 +00:00
Tim Donohue
bcf3110db9 Merge pull request #1723 from atmire/DS-2359
DS-2359 Error when depositing large files via browser (over 2Gb)
2017-07-08 05:56:33 +10:00
Tom Desair
c34b277c8d DS-3628: Check READ resource policies for items returned by the REST find-by-metadata-field endpoint 2017-07-07 19:47:26 +00:00
Pascal-Nicolas Becker
6263444f79 DS-3619: AuthorizeService.getAuthorizedGroups(...) should check dates 2017-07-07 19:30:00 +00:00
Tim Donohue
9caff2caab Merge pull request #1799 from tdonohue/DS-3397-6x
[DS-3397] Fix error when getting bitstream policies in REST API (6.x version)
2017-07-07 02:47:42 +10:00
Tim Donohue
6151f4f594 Merge pull request #1798 from atmire/DS-3563-DSpace-6_Missing-index-metadatavalue-resource-type-id
DS-3563: Fix Oracle Flyway migration error
2017-07-06 01:34:38 +10:00
Tim Donohue
f953848a6d [DS-3397] Add null checks to EPerson and Group 2017-07-05 15:27:43 +00:00
Tom Desair
ccc1b1b784 DS-3563: Fix Oracle Flyway migration error 2017-07-05 14:02:29 +02:00
Tom Desair
1bb6369ad6 DS-3127: Update test assert descriptions of GoogleBitstreamComparatorTest 2017-07-04 16:07:57 +02:00
Tom Desair
e31daa0230 DS-3632: Prevent the use of the locate function as this seems to give inconsistent results 2017-06-30 17:13:31 +02:00
Tom Desair
762197b452 DS-3632: Changed the update-handle-prefix script so that it does not change the handle suffix 2017-06-30 16:58:15 +02:00
kshepherd
ecd0230943 Merge pull request #1780 from atmire/DS-3595-6x
DS-3595
2017-06-30 05:41:42 +10:00
Philip Vissenaekens
c9cad9083e Merge branch 'dspace-6_x' into DS-3595-6x 2017-06-29 15:38:20 +02:00
Tom Desair
b462e0ac6d Merge branch 'dspace-6_x' into DS-3127-DSpace-6_Whitelist-allowable-formats-Google-Scholar-citation_pdf_url 2017-06-29 09:55:42 +02:00
Terry Brady
65d638771f Merge pull request #1747 from AlexanderS/localization-input-forms-xmlui
DS-3598: Allow localization of input-forms.xml with XMLUI
2017-06-28 17:15:40 -07:00
Terry Brady
224df82087 Merge pull request #1752 from AlexanderS/fix/DS-3601-npe-feedback-page
DS-3601: Fix NPE when accessing feedback page without "Referer" header
2017-06-28 16:31:44 -07:00
Terry Brady
a6b3ce0d46 Merge pull request #1784 from rivaldi8/DS-3245-csv-linebreaks_ds6
DS-3245: CSV linebreaks not supported by Bulkedit -  DSpace 6
2017-06-28 15:47:39 -07:00
Terry Brady
2944279618 Merge pull request #1727 from tomdesair/DS-3579_Context-mode-and-cache-management-CLI-commands
DS-3579 Context mode and cache management for CLI commands
2017-06-28 14:49:11 -07:00
Tom Desair
fe115125d1 DS-3127: Prevent database updates when directly manipulating the bistream list of a bundle 2017-06-28 17:46:58 +02:00
Tom Desair
6e9dec2c85 DS-3579: Make sure context.complete() can be called when in read-only 2017-06-28 16:15:30 +02:00
Terry Brady
fd298ae462 Merge pull request #1772 from tomdesair/DS-3571_Log-Hibernate-validation-errors
DS-3571 Log hibernate validation errors
2017-06-27 15:22:44 -07:00
Mark H. Wood
470c9b8f50 Merge pull request #1788 from mwoodiupui/DS-3568
[DS-3568] UTF-8 characters are now supported in configuration files
2017-06-26 13:34:04 -04:00
Terry Brady
33d3df72d6 Merge pull request #1732 from samuelcambien/DS-3584
DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address.
2017-06-23 16:56:27 -07:00
Christian Scheible
43cc3bd874 DS-3568. UTF-8 characters are now supported in configuration files 2017-06-22 16:35:30 -04:00
Tom Desair
3dc4909935 Fix IT tests 2017-06-22 17:07:55 +02:00
Tom Desair
71791c720f DS-3127: Process review feedback and fix tests 2017-06-22 15:01:45 +02:00
Àlex Magaz Graça
70a5124373 DS-3245: CSV linebreaks not supported by Bulkedit
When a multiline field contained empty lines, the importer stopped reading
the file. This reverts a change in 53d387fed so that reading stops when the
end of the file has been reached instead.

Fixes https://jira.duraspace.org/browse/DS-3245
2017-06-22 13:57:06 +02:00
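A generic illustration of the logic behind this fix (not the DSpaceCSV code itself): keep reading until end of file, where readLine() returns null, instead of stopping at the first empty line, so multiline values that contain blank lines survive the import.

```java
import java.io.BufferedReader;
import java.io.IOException;

public class CsvReadLoopSketch {
    /** Read every line up to EOF; an empty string is a valid blank line, null means end of file. */
    public static StringBuilder readAll(BufferedReader reader) throws IOException {
        StringBuilder out = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            out.append(line).append('\n');
        }
        return out;
    }
}
```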
Philip Vissenaekens
7879ecdf14 DS-3595 2017-06-21 17:18:30 +02:00
Mark H. Wood
1db3261b54 Merge pull request #1696 from tomdesair/DS-2748_Improve-cocoon-page-not-found-page
DS-2748: Do not throw an exception in the PageNotFoundTransformer
2017-06-21 10:18:53 -04:00
Alexander Sulfrian
593e6a5b37 DS-3522: Ensure Submission Policies are removed in XMLWorkflow 2017-06-21 15:21:13 +02:00
Tom Desair
3732cafc4e Merge branch 'dspace-6_x' into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 17:36:55 +02:00
Tom Desair
6f52d9700a Merge branch 'dspace-6_x' into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 17:18:22 +02:00
Tom Desair
769d3b590f DS-3579: Fix bug in metadata-import script 2017-06-19 14:59:00 +02:00
Tom Desair
7d04016436 Merge branch 'DS-3579_Context-mode-and-cache-management-CLI-commands' of https://github.com/tomdesair/DSpace into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 14:38:28 +02:00
edusperoni
0084ae3833 DS-2291 Autocomplete not working on Mirage2 (#1741)
* Fixes the autocomplete problem listed in DS-2291; also fixes the spinner, which was being referenced at the wrong path.

* fix common lookup button (now consistent with the author lookup button)
2017-06-14 11:36:45 -05:00
Pascal-Nicolas Becker
fc1b22e59c Merge pull request #1767 from tomdesair/PR-1715
DS-3572: Check authorization for a specified user instead of currentUser
2017-06-13 16:08:33 +02:00
Tom Desair
9af33bc244 DS-3571: Make sure that any Hibernate schema validation error is logged instead of just a NullPointerException 2017-06-13 11:17:20 +02:00
Tom Desair
bd2d81d556 DS-3572: Renamed epersonInGroup to isEPersonInGroup 2017-06-12 15:17:59 +02:00
Tom Desair
f6eb13cf53 DS-3572: Restored behaviour of GroupService.isMember and moved new behaviour to GroupService.isParentOf 2017-06-12 15:05:59 +02:00
Tom Desair
b4a24fff7b DS-3572: Fix bug where normal group membership is ignored if special groups are present + added tests 2017-06-10 14:32:45 +02:00
Tom Desair
8bb7eb0fe5 Improve tests + make GroupService.isMember method more performant for special groups 2017-06-10 00:34:24 +02:00
Eduardo Speroni
eb9ce230ad fixed nested vocabulary search 2017-06-09 19:18:15 -03:00
Tom Desair
f48178ed41 Fix DSpace AIP IT tests: Set correct membership for admin 2017-06-09 20:09:15 +02:00
Tim Donohue
1b70e64f77 Merge pull request #1751 from tomdesair/DS-3406_Sort-Communities-and-Collections-Hibernate-Sort-Annotation
DS-3406: Sort communities and collections iteration 2
2017-06-09 09:35:00 -07:00
Tom Desair
b56bb4de3e Attempt to fix constraint violation 2017-06-09 17:51:27 +02:00
Tom Desair
139f01fffd Restore GroupServiceImpl.isMember logic + fix tests 2017-06-09 17:30:06 +02:00
frederic
257d75ca0c DS-3406 unit tests for getCollection/getCommunity for different dspace objects 2017-06-09 10:05:36 +02:00
frederic
5422a63f08 DS-3579 removed FETCH keyword and fixed typo in help message of harvest 2017-06-09 09:46:28 +02:00
Pascal-Nicolas Becker
853e6baff1 Merge pull request #1761 from tdonohue/DS-3604
DS-3604: Fix Bitstream reordering in JSPUI
2017-06-06 23:08:06 +02:00
Tim Donohue
205d8b9f92 Refactor BundleServiceImpl.setOrder() to be more failsafe. Update Tests to prove out (previously these new tests failed) 2017-06-06 14:07:16 +00:00
Pascal-Nicolas Becker
bb1e13a3b2 DS-3572: Adding simple unit test for DS-3572. 2017-06-06 15:54:13 +02:00
Pascal-Nicolas Becker
d2311663d3 DS-3572: Check authorization for a specified user instead of currentUser 2017-06-06 15:54:12 +02:00
kshepherd
7d1836bddc Merge pull request #1762 from Georgetown-University-Libraries/ds3563-6x
[DS-3563] Port PR to 6x
2017-06-06 12:36:46 +12:00
Tom Desair
36002b5829 DS-3563: Conditional create index for Oracle 2017-06-02 13:19:02 -07:00
Tom Desair
6392e195b9 DS-3563 Added missing index on metadatavalue.resource_type_id 2017-06-02 13:18:43 -07:00
Tim Donohue
d37d3a04ac Create a valid unit test for BundleServiceImpl.setOrder() method 2017-06-02 20:14:29 +00:00
Tim Donohue
ef3afe19eb DS-3604: Sync JSPUI bitstream reorder code with XMLUI code 2017-06-02 19:50:14 +00:00
Pascal-Nicolas Becker
81e171ec24 Merge pull request #1760 from tuub/DS-3582
DS-3582: Reintroduce calls to context.abort() at the end of some JSPs to free db resources.
2017-06-02 12:54:29 +02:00
Pascal-Nicolas Becker
4086e73e0b DS-3582: Any JSP that calls UIUtil.obtainContext must free DB resources
Any JSP that calls UIUtil.obtainContext must either call context.abort or
context.commit to free the database connection and avoid exhausting the
database connection pool.
2017-06-01 17:37:30 +02:00
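A sketch of the pattern the fix reintroduces, assuming the JSPUI helper named in the commit; the surrounding class is illustrative, not an actual JSP:

```java
import javax.servlet.http.HttpServletRequest;

import org.dspace.app.webui.util.UIUtil;
import org.dspace.core.Context;

public class JspContextUsageSketch {
    public static void render(HttpServletRequest request) throws Exception {
        Context context = UIUtil.obtainContext(request);
        try {
            // ... run the read-only queries needed to render the page ...
        } finally {
            // Without abort()/complete(), every page view leaks a pooled DB connection
            if (context != null && context.isValid()) {
                context.abort();
            }
        }
    }
}
```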
Tim Donohue
5f827ecbe8 Merge pull request #1759 from AlexanderS/rest-submissions-to-workflow
DS-3281: Start workflow for REST submissions
2017-05-31 13:52:42 -07:00
Alexander Sulfrian
30c4ca0fea DS-3281: Start workflow for REST submissions
If an item is submitted through the REST API (via POST on
/{collection_id}/items) the item should not be published immediately,
but should be approved via the defined workflow.
2017-05-31 18:27:44 +02:00
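A hedged sketch of the change described above: rather than installing the item immediately, hand the new REST submission to the configured workflow via the DSpace 6 service API (treat the exact factory and method signatures as assumptions):

```java
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Context;
import org.dspace.workflow.factory.WorkflowServiceFactory;

public class RestSubmissionWorkflowSketch {
    /** Start the defined approval workflow for a freshly created workspace item. */
    public static void submit(Context context, WorkspaceItem workspaceItem) throws Exception {
        // The item becomes archived only after the workflow approves it,
        // instead of being published immediately on POST /{collection_id}/items.
        WorkflowServiceFactory.getInstance().getWorkflowService().start(context, workspaceItem);
    }
}
```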
Terry Brady
094f775b6a Merge pull request #1746 from Georgetown-University-Libraries/ds3594
[DS-3594] Refine unit tests to run against postgres
2017-05-31 08:59:14 -07:00
Alexander Sulfrian
85588871ca JSPUI: Fix DSpaceChoiceLookup in edit-item-form.jsp
The collection id changed from a simple integer to a UUID and JavaScript
needs quotes around it.
2017-05-31 10:49:59 +02:00
nteike
1ad95bdb12 JSPUI: Fix DSpaceChoiceLookup in edit-metadata.jsp 2017-05-31 10:49:37 +02:00
Terry Brady
593cc085d2 Add comment for null check during sort 2017-05-23 10:23:16 -07:00
Tom Desair
f4cdfb4e65 Revert imports 2017-05-22 17:35:03 +02:00
Tom Desair
b4d8436672 DS-3406: Remove unnecessary commit 2017-05-22 17:17:03 +02:00
Tom Desair
271b6913ab Fix integration tests. Remove Hibernate Sort annotations as a collection name can change and this breaks the Set semantics 2017-05-22 15:06:44 +02:00
Alexander Sulfrian
137384c13f DS-3601: Fix NPE when accessing feedback page without "Referer" header 2017-05-22 12:24:31 +02:00
Tom Desair
72f8f9461b Fix bug so that comparator can be used for sets 2017-05-22 10:52:15 +02:00
Tom Desair
78effeac61 Fixing tests 2017-05-22 09:39:13 +02:00
Yana De Pauw
62c804f1e8 DS-3406: Ordering sub communities and collections 2017-05-22 09:39:12 +02:00
Tim Donohue
40b05ec773 Fix minor compilation error in cherry-pick of PR#1662 2017-05-18 21:03:35 +00:00
Miika Nurminen
a0e91cacd9 [DS-3463] Fix IP authentication for anonymous users
Added group membership check based on context even if no eperson is found. Affects file downloads in (at least) xmlui.
2017-05-18 20:12:34 +00:00
Alexander Sulfrian
90ca4deb35 Fix code style 2017-05-18 11:20:15 +02:00
Alexander Sulfrian
83002c3177 DS-3598: Allow localization of input-forms.xml with XMLUI
This allows separate input-forms.xml for the different locales with
XMLUI. The feature was already present in JSPUI.
2017-05-17 16:05:14 +02:00
Terry Brady
ebf256caa1 Avoid NPE 2017-05-15 14:37:59 -07:00
Terry Brady
1d655e97c9 Make destroy more forgiving of test failures 2017-05-15 14:31:41 -07:00
Terry Brady
d85a2d9153 Avoid handle collision in persistent db 2017-05-15 14:19:39 -07:00
Terry Brady
6f8a8b7f25 change parameter setting for db portability 2017-05-15 13:47:20 -07:00
Generalelektrix
3ea041d4dc DS-3164 Item statistic displays UUID of bitstreams instead of name (#1744)
Simple change to return bit.getName() instead of the raw value (the UUID).
2017-05-10 17:16:50 -04:00
Tom Desair (Atmire)
6333fb6706 DS-3552 Read-only context and Hibernate improvements (#1694)
* Refactor READ ONLY mode in Context and adjust hibernate settings accordingly

* Set Context in READ-ONLY mode when retrieving community lists

* Fix Hibernate EHCache configuration + fix some Hibernate warnings

* Cache authorized actions and group membership when Context is in READ-ONLY mode

* Set default Context mode

* Let ConfigurableBrowse use a READ-ONLY context

* Add 2nd level cache support for Site and EPerson DSpaceObjects

* Added 2nd level caching for Community and Collection

* Fix tests and license checks

* Cache collection and community queries

* Small refactorings + backwards compatibility

* Set Context to READ-ONLY for JSPUI submissions and 'select collection' step

* OAI improvements part 1

* OAI indexing improvements part 1

* OAI indexing improvements part 2

* DS-3552: Only uncache resource policies in AuthorizeService when in read-only

* DS-3552: Additional comment on caching handles

* DS-3552: Fix cache leakage in SolrServiceResourceRestrictionPlugin

* DS-3552: Clear the read-only cache when switching Context modes

* DS-3552: Correct Group 2nd level cache size

* DS-3552: Always clear the cache, except when going from READ_ONLY to READ_ONLY
2017-05-04 14:12:06 -04:00
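A minimal sketch of the read-only Context usage this squashed commit introduces (the Mode enum and constructor are used as described; treat the details as assumptions rather than the exact DSpace API):

```java
import org.dspace.core.Context;

public class ReadOnlyBrowseSketch {
    public static void browseCommunities() throws Exception {
        // READ_ONLY mode lets authorization checks and group lookups be cached
        Context context = new Context(Context.Mode.READ_ONLY);
        try {
            // ... run the community/collection listing queries ...
        } finally {
            context.abort(); // nothing to commit in read-only mode; just release the connection
        }
    }
}
```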
Hardy Pottinger
f62c32efe6 Merge pull request #1739 from edusperoni/handlebars-4
DS-3387 Upgrade handlebars to v4.
2017-05-04 12:28:15 -04:00
Hardy Pottinger
068be33265 Merge pull request #1707 from Frederic-Atmire/DS-3558
DS-3558 Case-insensitive bot matching option
2017-05-04 10:08:59 -04:00
Eduardo Speroni
3c25e04c08 upgrade grunt-contrib-handlebars to 1.0.0 2017-05-03 21:11:58 -03:00
Pascal-Nicolas Becker
a44b109f7a Merge pull request #1684 from tomdesair/DS-3406_Sort-Communities-and-Collections-with-comparator
DS-3406: Sort communities and collections in-memory using a comparator
2017-05-03 14:37:24 +02:00
frederic
a24b0078c2 Made service for SpringDetector and made SpringDetector delegate to it 2017-05-03 11:15:35 +02:00
Tom Desair
e358cb84d1 DS-3406: Resolve review feedback 2017-05-02 17:59:25 +02:00
frederic
0f51d5ad6a ported DS-3558 from dspace 5 to dspace6 2017-05-02 10:52:59 +02:00
frederic
454b0c9d6a Few tests to test case-(in)sensitive matching 2017-04-28 09:57:22 +02:00
frederic
6e1a5d1df9 made the necessary changes to easily test this class 2017-04-28 09:56:43 +02:00
frederic
b61c821e66 case-insensitive option commented out by default 2017-04-28 09:56:16 +02:00
frederic
fd76b587be wrote tests for botmatching 2017-04-27 14:24:07 +02:00
Eduardo Speroni
f12006fe21 Upgrade handlebars to v4.
Fixed advanced filters to work with handlebars v4. (https://github.com/wycats/handlebars.js/issues/1028)
2017-04-26 16:55:49 -03:00
Tim Donohue
3116c53d5e Merge pull request #1737 from cjuergen/DS-3585-6_x
Fix for DS3585
2017-04-26 11:09:14 -07:00
cjuergen
e2ffbaa3b8 Fix for DS3585 2017-04-26 15:49:28 +02:00
samuel
856e5ad388 DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address 2017-04-26 11:36:08 +02:00
Jonas Van Goolen
2c8e36fcb9 DSpace 6_X/xmlui counterpart of JIRA ticket
https://jira.duraspace.org/browse/DS-2675

Contains the following:

Fixed "starts_with" implementation to only retrieve items that ACTUALLY start with the string.
Ascending/descending ordering now properly resets the offset (instead of remaining somewhere halfway)
2017-04-24 13:24:44 +02:00
Tom Desair
d2577fa16c DS-3579: Fix tests 2017-04-21 11:45:55 +02:00
Tom Desair
d5f9d9b0db DS-3579: Improve cache usage rdfizer, sub-daily, doi organiser 2017-04-21 11:45:55 +02:00
Tom Desair
e4b26d64ce DS-3579: Improve cache usage harvest 2017-04-21 11:45:55 +02:00
Tom Desair
2dde39abe7 DS-3579: Improve cache usage bitstore-migrate, cleanup, curate, embargo-lifter 2017-04-21 11:45:55 +02:00
Tom Desair
a715ae4d15 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-21 11:45:55 +02:00
Tom Desair
e63b3f4c13 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-21 11:45:54 +02:00
Tom Desair
acedcacdb3 DS-3579: Improve cache usage update-handle-prefix 2017-04-21 11:45:54 +02:00
Tom Desair
37219a986d DS-3579: checker, checker-emailer, filter-media, generate-sitemaps, index-authority 2017-04-21 11:45:54 +02:00
Tom Desair
a3fc30ad94 DS-3579: Fix tests 2017-04-20 21:55:28 +02:00
Terry Brady
e2862b3058 Merge pull request #1714 from tuub/DS-3575
DS-3575: Rename misleading find method in ResourcePolicyService
2017-04-20 11:47:20 -07:00
Mark H. Wood
8442e6f395 Merge pull request #1717 from mwoodiupui/DS-3564
[DS-3564] Limit maximum idle database connections by default
2017-04-20 12:39:11 -04:00
Tom Desair
7e1a0a1a0c DS-3552: Fix cache leakage in SolrServiceResourceRestrictionPlugin 2017-04-20 17:40:24 +02:00
Tom Desair
a5d414c0b2 DS-3552: Additional comment on caching handles 2017-04-20 17:36:10 +02:00
Tom Desair
cabb4fab66 DS-3579: Improve cache usage rdfizer, sub-daily, doi organiser 2017-04-20 17:33:07 +02:00
Tom Desair
5c19bb52e0 DS-3579: Improve cache usage harvest 2017-04-20 17:32:26 +02:00
Tom Desair
1e62dfdbbc DS-3579: Improve cache usage bitstore-migrate, cleanup, curate, embargo-lifter 2017-04-20 17:31:49 +02:00
Tom Desair
867ab6c9b9 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-20 17:30:37 +02:00
Tom Desair
392dd2653a DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-20 17:30:07 +02:00
Tom Desair
6f3546f844 DS-3579: Improve cache usage update-handle-prefix 2017-04-20 17:28:28 +02:00
Tim Donohue
9a0d293abf Merge pull request #1720 from Georgetown-University-Libraries/ds3516-6x
[DS-3516] 6x Port ImageMagick PDF Thumbnail class should only process PDFs
2017-04-20 06:56:08 -07:00
Philip Vissenaekens
782a963916 DS-2359 2017-04-20 13:10:39 +02:00
Tom Desair
0235ba391f DS-3579: checker, checker-emailer, filter-media, generate-sitemaps, index-authority 2017-04-20 10:41:51 +02:00
Alan Orth
eae5a96179 port PR1709 to 6x 2017-04-19 14:44:28 -07:00
Mark H. Wood
1ef1170159 [DS-3564] Limit maximum idle database connections by default 2017-04-19 14:56:44 -04:00
Tim Donohue
4f7410232a Merge pull request #1682 from tuub/DS-3535
[DS-3535] Reduced error logging by interrupted download
2017-04-19 09:45:05 -07:00
Tim Donohue
6c29cd61b6 Merge pull request #1699 from enrique/patch-1
DS-3554: Check for empty title in Submissions
2017-04-19 09:32:06 -07:00
Tim Donohue
f6a651d4df Merge pull request #1703 from samuelcambien/DS-3553
DS-3553: when creating a new version, complete the context before redirecting to the submission page
2017-04-19 09:27:14 -07:00
Tim Donohue
c57b443611 Merge pull request #1713 from atmire/DS-3573-Filtername-in-XMLUI-Discovery-filter-labels-dspace6
DS-3573: Filtername in XMLUI Discovery filter labels
2017-04-19 09:19:54 -07:00
Pascal-Nicolas Becker
a5bdff0803 DS-3575: Rename misleading find method in ResourcePolicyService 2017-04-18 18:12:32 +02:00
samuel
e3f72b280d DS-3553: when creating a new version, complete the context before redirecting to the submission page 2017-04-18 11:01:47 +02:00
Yana De Pauw
63ed4cc1e0 DS-3573: Filtername in XMLUI Discovery filter labels 2017-04-14 15:26:08 +02:00
Tom Desair
f0a5e7d380 DS-3552: Only uncache resource policies in AuthorizeService when in read-only 2017-04-14 09:26:08 +02:00
Tom Desair
1e64850af2 OAI indexing improvements part 2 2017-04-14 00:40:19 +02:00
Tom Desair
d9db5a66ca OAI indexing improvements part 1 2017-04-14 00:21:03 +02:00
Tom Desair
5f77bd441a OAI improvements part 1 2017-04-13 17:44:21 +02:00
frederic
4b87935cbb DS-3558 removed duplicate code and changed default option 2017-04-13 16:27:19 +02:00
Tim Donohue
3db74c7ba3 Merge pull request #1671 from mwoodiupui/DS-3505
[DS-3505] Bad redirection from logout action
2017-04-12 13:37:17 -07:00
frederic
f000b280c1 DS-3558 added comments on code 2017-04-12 15:04:57 +02:00
frederic
cad79dc6c9 DS-3558 made case insensitive botsearch configurable and optimized case insensitive pattern matching 2017-04-12 14:29:58 +02:00
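The two DS-3558 commits above make the case-insensitive bot check configurable and optimize the pattern matching. As a rough illustration of the general technique only (not the DSpace code itself; the class and method names below are invented for this sketch), the expressions are compiled once with Pattern.CASE_INSENSITIVE instead of lowercasing every user-agent string on each lookup:

import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

// Hypothetical helper illustrating configurable case-insensitive bot detection.
public class BotMatcher {
    private final List<Pattern> patterns;

    public BotMatcher(List<String> regexes, boolean caseInsensitive) {
        int flags = caseInsensitive ? Pattern.CASE_INSENSITIVE : 0;
        // Compile each expression once up front; reuse the compiled patterns for every request.
        this.patterns = regexes.stream()
                .map(r -> Pattern.compile(r, flags))
                .collect(Collectors.toList());
    }

    public boolean isBot(String userAgent) {
        if (userAgent == null) {
            return false;
        }
        return patterns.stream().anyMatch(p -> p.matcher(userAgent).find());
    }
}

Compiling up front moves the cost of case folding into the patterns themselves, so each request only pays for the match.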
Enrique Martínez Zúñiga
794600b96e Fix for DS-3554
Use StringUtils.isNotBlank instead of only checking title.length
2017-04-05 09:31:20 -05:00
Tom Desair
044ba1acd3 DS-2748: Do not throw an exception in the PageNotFoundTransformer but do return a 404 error code 2017-04-05 15:45:32 +02:00
Tom Desair
f54fe5c12e Set Context to READ-ONLY for JSPUI submissions and 'select collection' step 2017-04-05 15:23:16 +02:00
Tom Desair
1e917ed845 Small refactorings + backwards compatibility 2017-04-05 11:02:58 +02:00
Tom Desair
7719848d47 Cache collection and community queries 2017-04-05 09:59:31 +02:00
Tom Desair
f0e9e04a3a Fix tests and license checks 2017-04-04 13:44:38 +02:00
Tom Desair
5f194334ff Added 2nd level caching for Community and Collection 2017-04-04 13:16:13 +02:00
Tom Desair
7371a7c71d Add 2nd level cache support for Site and EPerson DSpaceObjects 2017-04-03 16:21:14 +02:00
Tom Desair
3963c95f6e Let ConfigurableBrowse use a READ-ONLY context 2017-04-03 15:59:13 +02:00
Tom Desair
75497f5107 Set default Context mode 2017-04-03 15:54:18 +02:00
Tom Desair
852c4d3b62 Cache authorized actions and group membership when Context is in READ-ONLY mode 2017-04-03 15:26:29 +02:00
Tom Desair
d108464a3a Fix Hibernate EHCache configuration + fix some Hibernate warnings 2017-04-03 15:26:29 +02:00
Tom Desair
dbfc8ce9a7 Set Context in READ-ONLY mode when retrieving community lists 2017-04-03 15:26:28 +02:00
Tom Desair
eee4923518 Refactor READ ONLY mode in Context and adjust hibernate settings accordingly 2017-04-03 15:26:28 +02:00
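The DS-3579/DS-3552 series above (and the command-line tool diffs further down, e.g. MetadataExport and Harvest) all apply the same pattern: open the Context in READ_ONLY or BATCH_EDIT mode and evict each processed entity from the Hibernate session. A minimal sketch of that pattern, assuming the DSpace 6 Context and Item classes shown in the diffs and leaving the per-item work as a comment:

import java.sql.SQLException;
import java.util.Iterator;

import org.dspace.content.Item;
import org.dspace.core.Context;

public class ReadOnlyBatchExample {
    // Hypothetical batch job illustrating the READ_ONLY + uncacheEntity pattern.
    public static void run(Iterator<Item> items) throws SQLException {
        Context context = new Context(Context.Mode.READ_ONLY);
        try {
            while (items.hasNext()) {
                Item item = items.next();
                // ... do read-only work with the item here ...
                context.uncacheEntity(item);   // evict so the session cache stays small
            }
        } finally {
            context.complete();
        }
    }
}

Without uncacheEntity the session cache grows with every item processed, which is what made long-running scripts slow down or run out of memory.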
Toni Prieto
9ef505498b [DS-2947] DIM crosswalks repeats authority & confidence values in the metadata values 2017-03-24 16:16:31 +00:00
Tom Desair
3540fe5ec6 DS-3406: Sort communities and collections in-memory using a comparator 2017-03-23 15:27:02 +01:00
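DS-3406 above sorts communities and collections in memory with a comparator rather than relying on database ordering. A minimal, hypothetical sketch of the idea (the name accessor is passed in as a function because this example does not depend on the real DSpace classes):

import java.util.Comparator;
import java.util.List;
import java.util.function.Function;

public final class NameSort {
    // Hypothetical helper: sort DSpace objects (communities, collections) by name in memory.
    public static <T> void sortByName(List<T> objects, Function<T, String> nameOf) {
        objects.sort(Comparator.comparing(nameOf, String.CASE_INSENSITIVE_ORDER));
    }
}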
Tim Donohue
57f2a10da1 Merge pull request #1663 from mwoodiupui/DS-1140
[DS-1140] Update MSWord Media Filter to use Apache POI (like PPT Filter) and also support .docx
2017-03-22 10:31:35 -05:00
Per Broman
1e33e27a84 [DS-3535] Reduced error logging on interrupted downloads 2017-03-21 10:29:06 +01:00
Pascal-Nicolas Becker
a54bf11b8c Merge pull request #1673 from tuub/DS-3523
[DS-3523] Bugfix for search with embargoed thumbnails
2017-03-09 12:38:58 +01:00
Per Broman
0601e9f061 [DS-3523] Bugfix for search with embargoed thumbnails 2017-03-09 12:07:52 +01:00
Mark H. Wood
b578abd054 [DS-3505] On logout redirect to dspace.url, not context path. 2017-03-08 15:51:01 -05:00
Terry Brady
bc8629b145 [DS-3348] Drop date check in EmbargoService (#1542)
* Drop date check in EmbargoService

* Revise comment per review
2017-03-08 18:29:12 +00:00
Peter Dietz
26859b1133 DS-3366 Fix handleresolver by removing out.close (#1560) 2017-03-08 18:25:38 +00:00
Andrea Schweer
97785d778f [DS-3336] Properly sort collections in move item drop-down 2017-03-08 18:08:30 +00:00
Terry Brady
f1c3a9d919 fix typo in comment 2017-03-08 17:44:30 +00:00
Terry Brady
6442c979aa First attempt to resort submitters 2017-03-08 17:44:12 +00:00
Tim Donohue
a36f5b1f48 Merge pull request #1670 from tuub/DS-3521
[DS-3521] Bugfix browsing embargoed thumbnail
2017-03-08 09:51:56 -06:00
Per Broman
36a87c2107 [DS-3521] Bugfix browsing embargoed thumbnail 2017-03-07 12:09:28 +01:00
Mark H. Wood
43d7cd564c [DS-1140] Add configuration data 2017-03-02 15:49:34 -05:00
Mark H. Wood
9d8738c934 [DS-1140] Add unit test. 2017-03-02 14:50:14 -05:00
Mark H. Wood
c09edc5a15 [DS-1140] No need to treat old and new Word formats differently 2017-03-02 14:49:24 -05:00
Tim Donohue
2d95c7a2a1 Merge pull request #1652 from Georgetown-University-Libraries/ds3282-6x
[DS-3282] 6x Fix js error for filters with dashes
2017-03-01 14:59:47 -06:00
Terry Brady
d2c43b8aa5 Merge pull request #1654 from Georgetown-University-Libraries/ds2789-6_x
[DS-2789] 6x Display a "restricted image" for a thumbnail if the bitstream is restricted
2017-03-01 12:53:44 -08:00
Terry Brady
5d9dd4d4e3 Merge pull request #1660 from Georgetown-University-Libraries/ds3283-6x2
[DS-3283] 6x Mirage2: Edit Collection Source - No Field Label for Set Id
2017-03-01 12:42:38 -08:00
Mark H. Wood
24c1f5367c [DS-1140] New POI-based MS Word extractor and some comment cleanup 2017-02-28 17:12:23 -05:00
Hardy Pottinger
fbaf950388 [DS-3475] adding more guidance to example local.cfg as per suggestion of Tim Donohue 2017-02-28 16:10:08 -06:00
Hardy Pottinger
ddedfa2a14 [DS-3475] added back assetstore.dir configuration to dspace.cfg 2017-02-28 16:07:58 -06:00
Terry W Brady
2b96f9472b Add default lock icon for Mirage theme 2017-02-27 14:10:02 -08:00
Terry W Brady
1af23f2d8b reapply pr from master 2017-02-27 14:10:02 -08:00
Terry W Brady
a868a4bc9b Re-applying changes 2017-02-27 13:45:53 -08:00
Tim Donohue
2734dca1cd Merge pull request #1659 from tdonohue/fix_travis_timeouts
Fix Travis CI Maven download timeouts
2017-02-27 15:36:07 -06:00
Tim Donohue
8c70f9bc8c Workaround for travis-ci/travis-ci#4629 2017-02-27 21:21:08 +00:00
Tom Desair
8d56e828a2 DS-3367: Fix authorization error when non-admin users claim a configurable workflow task 2017-02-23 16:28:37 -05:00
Mark H. Wood
0e8c95a196 Merge pull request #1651 from mwoodiupui/DS-3378
[DS-3378] Patch to restore lost indices, from Adan Roman
2017-02-23 16:06:08 -05:00
Terry Brady
cf190c78e8 Fix js error for filters with dashes 2017-02-23 09:40:10 -08:00
Mark H. Wood
2d1c59ac49 [DS-3378] Patch to restore lost indices, from Adan Roman 2017-02-22 17:24:46 -05:00
Tom Desair
3a03e7a9d3 DS-2952: Added missing license 2017-02-22 20:26:42 +00:00
Tom Desair
757264c1f6 DS-2952: Only prepend new line if we have an actual input stream 2017-02-22 20:26:33 +00:00
Tom Desair
dfe6d79da4 DS-2952: Small improvements to FullTextContentStreams and added a unit test for it 2017-02-22 20:26:23 +00:00
Tom Desair
708fe215b0 DS-2952: Use a SequenceInputStream to add the content of multiple full text bitstreams to SOLR 2017-02-22 20:26:09 +00:00
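The DS-2952 commits above feed the full text of several bitstreams to Solr as one logical stream via java.io.SequenceInputStream, prepending a newline only when there is a preceding stream. A small illustrative sketch of that idea, not the DSpace FullTextContentStreams class itself:

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public final class FullTextConcat {
    // Combine several full-text streams into one logical stream,
    // separating documents with a newline. Illustrative only.
    public static InputStream concatenate(List<InputStream> fullTexts) {
        List<InputStream> parts = new ArrayList<>();
        for (InputStream in : fullTexts) {
            if (!parts.isEmpty()) {
                parts.add(new ByteArrayInputStream("\n".getBytes(StandardCharsets.UTF_8)));
            }
            parts.add(in);
        }
        return new SequenceInputStream(Collections.enumeration(parts));
    }
}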
Hardy Pottinger
a51ad3c6eb Merge pull request #1614 from jonas-atmire/DS-3448-MultiSelect-in-Submission
DS-3448 Multi-select in submission for workflow and workspace items
2017-02-22 12:13:12 -06:00
Hardy Pottinger
c5aebee9cc Merge pull request #1649 from hardyoyo/DS-3501-fix-XML-validation-by-excluding-failing-node-packages
[DS-3501] adjust XML validation
2017-02-22 11:17:38 -06:00
Hardy Pottinger
8a06522fa9 [DS-3501] adjust XML validation to skip contents of any folder that includes the text node/node_modules 2017-02-22 16:41:35 +00:00
samuel
267518ebaf DS-3425 OutputStream gets closed in JSONDiscoverySearcher 2017-02-21 21:34:29 +00:00
samuel
2685cd793e DS-3415 - administrative.js doEditCommunity wrong parameter name 2017-02-21 21:03:19 +00:00
Tim Donohue
36c7fa9c1a Merge pull request #1588 from atmire/DS-3419-6_x
DS-3419
2017-02-21 14:55:56 -06:00
Bram Luyten
54c5c2932b DS-2840 sidebar facet logging from INFO to DEBUG
Changes INFO level sidebar facet transformer log entries to DEBUG
2017-02-18 14:20:08 +01:00
Luigi Andrea Pascarelli
7225f2597a DS-3356 add option to turn off the authz system 2017-02-15 22:10:18 +00:00
Mark H. Wood
59632413c2 [DS-3469] virus scan during submission attempts to read uploaded bitstream as anonymous user, which fails (#1632)
* [DS-3469] Add the current session context to the curation task run.

* [DS-3469] Log how I/O failed, not just that it did.

* [DS-3469] Keep reference to Bundle from which we just removed the Bitstream instead of expecting the List of Bundle to be unaltered.

* [DS-3469] Finish switching from e.getMessage() to e

* [DS-3469] Note the side effect of calling curate() with a Context.
2017-02-08 10:32:29 -06:00
Tim Donohue
7650af1e69 Merge pull request #1639 from rradillen/DS-3473
DS-3473: add guard code in case no dot is present in bitstream name
2017-02-08 10:24:28 -06:00
Tim Donohue
e4659832a0 Merge pull request #1641 from cjuergen/DS-3479-6_x
Fix for DS-3479 preventing the import of empty metadata
2017-02-08 10:15:26 -06:00
Tim Donohue
ab982e4f0b Merge pull request #1613 from tomdesair/DS-3436-Sharding-corrupts-multivalued-fields
DS-3436 Sharding SOLR cores corrupts multivalued fields
2017-02-08 09:47:22 -06:00
Terry Brady
8d76aa2010 [DS-3456] 6x Fix Command Line Parameters for statistics import/export tools (#1624)
* Clarify command line args

* support flexible import/export of stats

* Fix DS-3464 solr-reindex-statistics for shard

* Preserve multi val fields on import/export

* Time zone consistency in shard name creation

* Migrate PR feedback from 5x to 6x

* whitespace
2017-02-08 09:43:03 -06:00
Tim Donohue
9eb7c6734c Merge pull request #1633 from Georgetown-University-Libraries/ds3457b
[DS-3457] Address tomcat hang when multiple solr shards exist in DSpace 6
2017-02-08 09:30:42 -06:00
cjuergen
99c1af8688 Fix for DS-3479 preventing the import of empty metadata 2017-02-06 15:11:14 +01:00
Roeland Dillen
866bfe8fd8 add guard code in case no dot is present in bitstream name 2017-02-05 13:45:40 +01:00
Terry Brady
12de02c7f3 Merge pull request #1637 from kshepherd/DS-3477
[DS-3477] fix altmetrics config lookups in item-view.xsl (6.x)
2017-02-02 09:56:12 -08:00
Kim Shepherd
0c0b280d05 [DS-3477] fix altmetrics config lookups in item-view.xsl 2017-02-02 18:04:36 +13:00
Hardy Pottinger
bf1979fd41 [DS-3475] adding more guidance to example local.cfg as per suggestion of Tim Donohue 2017-02-01 15:49:19 -06:00
Hardy Pottinger
e32b93bae3 [DS-3475] added back assetstore.dir configuration to dspace.cfg 2017-02-01 15:48:51 -06:00
kshepherd
f86fff9063 Merge pull request #1611 from tomdesair/DS-3446-DSpace-6x_Non-admin-submitter-cannot-remove-bitstream
DS-3446: On bitstream delete, remove policies only after the bitstream has been updated
2017-02-02 09:42:33 +13:00
Terry W Brady
f7cadf8774 Initialize solr shards at first stats post
Make it more likely that the shards are awake on first use
2017-01-31 15:02:55 -08:00
Terry W Brady
4f7520d532 Additional comments 2017-01-30 17:05:04 -08:00
Terry W Brady
9904fdb412 DS-3457 and DS-3458 fixes 2017-01-30 12:11:06 -08:00
Terry Brady
e0e223e2bf [DS-3468] 6x Ignore bin directory built by Eclipse (#1627)
* Exclude top level /bin directory built by Eclipse
2017-01-26 16:28:25 +01:00
Hardy Pottinger
45762e993d Merge pull request #1617 from jonas-atmire/DS-3445-ChecksumChecker-no-enum-constant-error
DS-3445 Only add "ResultCode" if not default
2017-01-19 10:15:11 -06:00
Andrew Bennet
ce72010805 [DS-3460] Fix incorrect REST documentation 2017-01-17 21:32:40 +01:00
Bram Luyten
faa12bfd33 Merge pull request #1610 from tomdesair/DS-3108-DSpace-6x_Support-non-email-based-authentication-in-REST-API
DS-3108 DSpace 6x: Support non-email based authentication in REST API
2017-01-14 11:44:35 +01:00
Jonas Van Goolen
2805386f9d DS-3445 Only add "ResultCode" if not default 2017-01-13 10:41:30 +01:00
Jonas Van Goolen
a62eddeb59 DS-3448 Removal of unnecessary duplicate javascript file 2017-01-13 09:43:43 +01:00
Jonas Van Goolen
c873e554d3 DS-3448 Multi-select in submission for workflow and workspace items -> License headers in new files 2017-01-12 13:52:21 +01:00
Jonas Van Goolen
01dee698c2 DS-3448 Multi-select in submission for workflow and workspace items 2017-01-11 15:33:25 +01:00
Tom Desair
eb5dc58384 DS-3436: Tell SOLR to split values of multi-valued fields when sharding cores 2017-01-11 12:55:10 +01:00
Tim Donohue
958631c81c Merge pull request #1600 from samuelcambien/dspace-6_x-DS-3435
DS-3435 possible NullPointerException at AccessStepUtil$populateEmbar…
2017-01-10 09:04:35 -06:00
Tom Desair
89ded55942 DS-3108 DSpace 6 only: Revert rename REST API login parameter email to user 2017-01-10 14:04:01 +01:00
Tom Desair
9855022228 Revert "DS-3108: Rename REST API login parameter email to user"
This reverts commit d2c4233d9e.
2017-01-10 13:57:29 +01:00
Tom Desair
bfc68d3354 DS-3446: Remove policies only after the bitstream has been updated (otherwise the current user has not WRITE rights) 2017-01-09 22:53:52 +01:00
Tom Desair
38848e16d3 DS-3108: Update REST API authentication documentation
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:33:58 +01:00
Tom Desair
0244a425ae DS-3108: Remove deprecation since there is no alternative 2017-01-09 17:32:55 +01:00
Tom Desair
c3c5287880 DS-3108: Remove unused imports 2017-01-09 17:32:49 +01:00
Tom Desair
3321cba560 DS-3108: Remove unnecessary /login-shibboleth endpoint
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:32:45 +01:00
Tom Desair
684e87ed20 DS-3108: Return FORBIDDEN error code when authentication on the REST API failed
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:31:24 +01:00
Tom Desair
d2c4233d9e DS-3108: Rename REST API login parameter email to user 2017-01-09 17:30:38 +01:00
Tom Desair
ae9862395a DS-3108: Support authentication mechanisms where the e-mail attribute is not an e-mail address 2017-01-09 17:30:26 +01:00
Tim Donohue
6256c673b9 Merge pull request #1607 from bram-atmire/DS-3289
DS-3289 Removing double slashes in image paths
2017-01-09 09:17:23 -06:00
Bram Luyten
2b0448fe64 DS-3289 Removing double slashes in image paths 2017-01-07 18:22:03 +01:00
cjuergen
1e4ae0b5e3 Cherry pick DS-3440 solution d95902b 2017-01-06 19:09:44 +01:00
Bram Luyten
1f36899abe Merge pull request #1605 from 4Science/DS-3441-6x
DS-3441 READ permission on the Collection object not respected by the JSPUI (6_x)
2017-01-06 18:18:50 +01:00
Andrea Bollini
a6aa9816d2 DS-3441 READ permission on the Collection object not respected by the JSPUI 2017-01-06 13:56:47 +01:00
Bram Luyten
242d1357c7 Merge pull request #1601 from tomdesair/DS-3381_Workspace-item-not-saved-when-using-versioning
DS-3381 workspace item not saved when using versioning
2017-01-05 16:43:50 +01:00
Tom Desair
4b927562b6 DS-3381: Do an explicit commit so that the workspace item is written to the database before the redirect to the submission form (see versioning.js doCreateNewVersion) 2017-01-04 23:05:20 +01:00
samuel
7b6ea8e807 DS-3435 possible NullPointerException at AccessStepUtil$populateEmbargoDetail
Conflicts:
	dspace-xmlui/src/main/java/org/dspace/app/xmlui/aspect/submission/submit/AccessStepUtil.java
2017-01-03 12:40:56 +01:00
Philip Vissenaekens
a3c6aa2ced DS-3419 2016-12-09 13:14:55 +01:00
Ivan Masár
50eed239f5 DS-3363 CSV import error says "row", means "column" 2016-11-14 18:28:11 +01:00
Ivan Masár
3065389435 typo: xforwarderfor -> xforwardedfor 2016-11-01 16:18:45 +01:00
404 changed files with 16253 additions and 7146 deletions

1
.gitignore vendored
View File

@@ -6,6 +6,7 @@ tags
## Ignore project files created by Eclipse
.settings/
/bin/
.project
.classpath

View File

@@ -7,6 +7,8 @@ env:
# Install prerequisites for building Mirage2 more rapidly
before_install:
# Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
- rm ~/.m2/settings.xml
# Install Node.js 6.5.0 & print version info
- nvm install 6.5.0
- node --version
@@ -21,10 +23,10 @@ before_install:
# Print ruby version info (should be installed)
- ruby -v
# Install Sass & print version info
- gem install sass
- gem install sass -v 3.3.14
- sass -v
# Install Compass & print version info
- gem install compass
- gem install compass -v 1.0.1
- compass version
# Skip install stage, as we'll do it below

View File

@@ -1,7 +1,7 @@
DSpace source code license:
Copyright (c) 2002-2016, DuraSpace. All rights reserved.
Copyright (c) 2002-2018, DuraSpace. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are

View File

@@ -27,25 +27,22 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.10.50 - https://aws.amazon.com/sdkforjava)
* HPPC Collections (com.carrotsearch:hppc:0.5.2 - http://labs.carrotsearch.com/hppc.html/hppc)
* metadata-extractor (com.drewnoakes:metadata-extractor:2.6.2 - http://code.google.com/p/metadata-extractor/)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.5.4 - http://github.com/FasterXML/jackson)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.7.0 - http://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.5.4 - https://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.7.0 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.5.4 - http://github.com/FasterXML/jackson)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.7.0 - http://github.com/FasterXML/jackson)
* Jackson-JAXRS-base (com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.5.4 - http://wiki.fasterxml.com/JacksonHome/jackson-jaxrs-base)
* Jackson-JAXRS-JSON (com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.5.4 - http://wiki.fasterxml.com/JacksonHome/jackson-jaxrs-json-provider)
* Jackson-module-JAXB-annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.5.4 - http://wiki.fasterxml.com/JacksonJAXBAnnotations)
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.21.0 - https://github.com/google/google-api-java-client/google-api-client)
* Google Analytics API v3-rev123-1.21.0 (com.google.apis:google-api-services-analytics:v3-rev123-1.21.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.8.11 - http://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.8.11 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.8.11.1 - http://github.com/FasterXML/jackson)
* Jackson-JAXRS-base (com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.8.11 - http://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-base)
* Jackson-JAXRS-JSON (com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.8.11 - http://github.com/FasterXML/jackson-jaxrs-providers/jackson-jaxrs-json-provider)
* Jackson module: JAXB-annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.8.11 - http://github.com/FasterXML/jackson-module-jaxb-annotations)
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client)
* Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/)
* Gson (com.google.code.gson:gson:2.6.1 - https://github.com/google/gson/gson)
* Guava: Google Core Libraries for Java (com.google.guava:guava:14.0.1 - http://code.google.com/p/guava-libraries/guava)
* Guava: Google Core Libraries for Java (com.google.guava:guava:19.0 - https://github.com/google/guava/guava)
* Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.21.0 - https://github.com/google/google-http-java-client/google-http-client)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.21.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.21.0 - https://github.com/google/google-oauth-java-client/google-oauth-client)
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.23.0 - https://github.com/google/google-oauth-java-client/google-oauth-client)
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.2 - http://code.google.com/p/concurrentlinkedhashmap)
* ISO Parser (com.googlecode.mp4parser:isoparser:1.0-RC-1 - http://code.google.com/p/mp4parser/)
* builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons)
@@ -53,6 +50,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Jtwig Core Functions (com.lyncode:jtwig-functions:2.0.1 - http://www.lyncode.com/jtwig-functions)
* Jtwig Spring (com.lyncode:jtwig-spring:2.0.1 - http://www.lyncode.com/jtwig-spring)
* Test Support (com.lyncode:test-support:1.0.3 - http://nexus.sonatype.org/oss-repository-hosting.html/test-support)
* MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/)
* MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.11.0 - http://dev.maxmind.com/geoip/geoip2/web-services)
* Spatial4J (com.spatial4j:spatial4j:0.4.1 - https://github.com/spatial4j/spatial4j)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.2 - http://commons.apache.org/proper/commons-beanutils/)
* Apache Commons CLI (commons-cli:commons-cli:1.3.1 - http://commons.apache.org/proper/commons-cli/)
@@ -60,7 +59,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
* Apache Commons Configuration (commons-configuration:commons-configuration:1.10 - http://commons.apache.org/configuration/)
* Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/)
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.1 - http://commons.apache.org/proper/commons-fileupload/)
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/)
* HttpClient (commons-httpclient:commons-httpclient:3.1 - http://jakarta.apache.org/httpcomponents/httpclient-3.x/)
* Commons IO (commons-io:commons-io:2.4 - http://commons.apache.org/io/)
* commons-jexl (commons-jexl:commons-jexl:1.0 - no url defined)
@@ -69,7 +68,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/)
* Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/)
* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
* The Netty Project (io.netty:netty:3.7.0.Final - http://netty.io/)
* jakarta-regexp (jakarta-regexp:jakarta-regexp:1.4 - no url defined)
* javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/)
* Bean Validation API (javax.validation:validation-api:1.1.0.Final - http://beanvalidation.org)
@@ -84,8 +82,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core)
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
* Abdera Parser (org.apache.abdera:abdera-parser:1.1.3 - http://abdera.apache.org/abdera-parser)
* org.apache.tools.ant (org.apache.ant:ant:1.7.0 - http://ant.apache.org/ant/)
* ant-launcher (org.apache.ant:ant-launcher:1.7.0 - http://ant.apache.org/ant-launcher/)
* Apache Ant Core (org.apache.ant:ant:1.9.1 - http://ant.apache.org/)
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.9.1 - http://ant.apache.org/)
* Avalon Framework API (org.apache.avalon.framework:avalon-framework-api:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-api/)
* Avalon Framework Implementation (org.apache.avalon.framework:avalon-framework-impl:4.3.1 - http://www.apache.org/excalibur/avalon-framework/avalon-framework-impl/)
* Cocoon Configuration API (org.apache.cocoon:cocoon-configuration-api:1.0.2 - http://cocoon.apache.org/subprojects/configuration/1.0/configuration-api/1.0/)
@@ -111,6 +109,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Cocoon XML Implementation (org.apache.cocoon:cocoon-xml-impl:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-impl/1.0/)
* Cocoon XML Resolver (org.apache.cocoon:cocoon-xml-resolver:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-resolver/1.0/)
* Cocoon XML Utilities (org.apache.cocoon:cocoon-xml-util:1.0.0 - http://cocoon.apache.org/2.2/core-modules/xml-util/1.0/)
* Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.7 - http://commons.apache.org/proper/commons-compress/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.0 - http://commons.apache.org/proper/commons-csv/)
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.1.1 - http://commons.apache.org/dbcp/)
@@ -174,11 +173,11 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.2 - http://pdfbox.apache.org/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.4 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.2 - http://www.apache.org/pdfbox-parent/pdfbox/)
* Apache POI (org.apache.poi:poi:3.13 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml:3.13 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi:3.17 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml:3.17 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.10.1 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.13 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:3.13 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.17 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:3.17 - http://poi.apache.org/)
* Apache Solr Search Server (org.apache.solr:solr:4.10.4 - http://lucene.apache.org/solr-parent/solr)
* Apache Solr Analysis Extras (org.apache.solr:solr-analysis-extras:4.10.4 - http://lucene.apache.org/solr-parent/solr-analysis-extras)
* Apache Solr Content Extraction Library (org.apache.solr:solr-cell:4.10.4 - http://lucene.apache.org/solr-parent/solr-cell)
@@ -191,6 +190,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.14 - http://ws.apache.org/axiom/)
* Axiom Impl (org.apache.ws.commons.axiom:axiom-impl:1.2.14 - http://ws.apache.org/axiom/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:2.6.0 - http://xmlbeans.apache.org)
* Apache Yetus - Audience Annotations (org.apache.yetus:audience-annotations:0.5.0 - https://yetus.apache.org/audience-annotations)
* zookeeper (org.apache.zookeeper:zookeeper:3.4.11 - no url defined)
* zookeeper (org.apache.zookeeper:zookeeper:3.4.6 - no url defined)
* Evo Inflector (org.atteo:evo-inflector:1.2.1 - http://atteo.org/static/evo-inflector)
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
@@ -251,8 +252,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* oai4j (se.kb:oai4j:0.6b1 - http://oai4j-client.sourceforge.net/)
* StAX API (stax:stax-api:1.0.1 - http://stax.codehaus.org/)
* standard (taglibs:standard:1.1.2 - no url defined)
* Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
* Xalan Java (xalan:xalan:2.7.2 - http://xml.apache.org/xalan-j/)
* xalan (xalan:xalan:2.7.0 - no url defined)
* Xerces2-j (xerces:xercesImpl:2.11.0 - https://xerces.apache.org/xerces2-j/)
* xmlParserAPIs (xerces:xmlParserAPIs:2.6.2 - no url defined)
* XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/)
@@ -265,6 +265,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* XMP Library for Java (com.adobe.xmp:xmpcore:5.1.2 - http://www.adobe.com/devnet/xmp.html)
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
* curvesapi (com.github.virtuald:curvesapi:1.04 - https://github.com/virtuald/curvesapi)
* Protocol Buffer Java API (com.google.protobuf:protobuf-java:2.5.0 - http://code.google.com/p/protobuf)
* Jena IRI (com.hp.hpl.jena:iri:0.8 - http://jena.sf.net/iri)
* Jena (com.hp.hpl.jena:jena:2.6.4 - http://www.openjena.org/)
@@ -274,31 +275,30 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Biblio Transformation Engine :: Core (gr.ekt.bte:bte-core:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-core)
* Biblio Transformation Engine :: Input/Output (gr.ekt.bte:bte-io:0.9.3.5 - http://github.com/EKT/Biblio-Transformation-Engine/bte-io)
* jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/)
* JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
* ANTLR 3 Runtime (org.antlr:antlr-runtime:3.5 - http://www.antlr.org)
* Morfologik FSA (org.carrot2:morfologik-fsa:1.7.1 - http://morfologik.blogspot.com/morfologik-fsa/)
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.1 - http://woodstox.codehaus.org/StAX2)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.3 - https://github.com/dspace/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.0-rc4-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.3 - https://github.com/dspace/dspace-xmlui-lang)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.5 - https://github.com/dspace/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.3-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.6 - https://github.com/dspace/dspace-xmlui-lang)
* handle (org.dspace:handle:6.2 - no url defined)
* jargon (org.dspace:jargon:1.4.25 - no url defined)
* mets (org.dspace:mets:1.5.2 - no url defined)
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
* XOAI : OAI-PMH Java Toolkit (org.dspace:xoai:3.2.10 - http://nexus.sonatype.org/oss-repository-hosting.html/xoai)
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.3-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
@@ -310,6 +310,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* XMLUnit for Java (xmlunit:xmlunit:1.1 - http://xmlunit.sourceforge.net/)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
BSD-Style License:
* JAXB2 Basics - Runtime (org.jvnet.jaxb2_commons:jaxb2-basics-runtime:0.9.5 - https://github.com/highsource/jaxb2-basics/jaxb2-basics-runtime)
Common Development and Distribution License (CDDL):
* JAXB Reference Implementation (com.sun.xml.bind:jaxb-impl:2.2.5 - http://jaxb.java.net/)
@@ -366,7 +370,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
GNU Lesser General Public License (LGPL):
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
@@ -391,6 +394,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Bouncy Castle CMS and S/MIME API (org.bouncycastle:bcmail-jdk15:1.46 - http://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15:1.46 - http://www.bouncycastle.org/java.html)
* ORCID Java API generated via JAXB (org.dspace:orcid-jaxb-api:2.1.0 - https://github.com/DSpace/orcid-jaxb-api)
* Main (org.jmockit:jmockit:1.21 - http://www.jmockit.org)
* OpenCloud (org.mcavallo:opencloud:0.3 - http://opencloud.mcavallo.org/)
* Mockito (org.mockito:mockito-core:1.10.19 - http://www.mockito.org)
@@ -406,6 +410,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* Rhino (rhino:js:1.6R7 - http://www.mozilla.org/rhino/)
The PostgreSQL License:
* PostgreSQL JDBC Driver - JDBC 4.2 (org.postgresql:postgresql:42.2.1 - https://github.com/pgjdbc/pgjdbc)
Public Domain:
* AOP alliance (aopalliance:aopalliance:1.0 - http://aopalliance.sourceforge.net)
@@ -417,9 +425,9 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* JDOM (org.jdom:jdom:1.1.3 - http://www.jdom.org)
The PostgreSQL License:
The JSON License:
* PostgreSQL JDBC Driver - JDBC 4.2 (org.postgresql:postgresql:9.4.1211 - https://github.com/pgjdbc/pgjdbc)
* JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java)
license.txt:

View File

@@ -12,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>6.1-SNAPSHOT</version>
<version>6.3</version>
<relativePath>..</relativePath>
</parent>
@@ -266,6 +266,9 @@
<include>**/*.xsl</include>
<include>**/*.xmap</include>
</includes>
<excludes>
<exclude>**/node/node_modules/**</exclude>
</excludes>
</validationSet>
</validationSets>
</configuration>
@@ -331,16 +334,6 @@
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
@@ -505,6 +498,11 @@
<artifactId>contiperf</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.rometools</groupId>
<artifactId>rome-modules</artifactId>
@@ -568,9 +566,9 @@
<artifactId>commons-configuration</artifactId>
</dependency>
<dependency>
<groupId>com.maxmind.geoip</groupId>
<artifactId>geoip-api</artifactId>
<version>1.3.0</version>
<groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId>
<version>2.11.0</version>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
@@ -696,7 +694,7 @@
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.22.1</version>
<version>${jersey.version}</version>
</dependency>
<!-- S3 -->
<dependency>
@@ -708,27 +706,19 @@
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- S3 also wanted jackson... -->
<!-- For ORCID v2 integration -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.7.0</version>
<groupId>org.dspace</groupId>
<artifactId>orcid-jaxb-api</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.7.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.7.0</version>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20180130</version>
</dependency>
</dependencies>

View File

@@ -196,7 +196,7 @@ public class DSpaceCSV implements Serializable
StringBuilder lineBuilder = new StringBuilder();
String lineRead;
while (StringUtils.isNotBlank(lineRead = input.readLine()))
while ((lineRead = input.readLine()) != null)
{
if (lineBuilder.length() > 0) {
// Already have a previously read value - add this line
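The DSpaceCSV hunk above replaces the StringUtils.isNotBlank(...) loop condition with a plain null check: the old condition stopped at the first blank or whitespace-only line, truncating CSV values that legitimately contain empty lines, while the new condition reads until end of input. A standalone illustration of the difference (hypothetical input, not the DSpace class):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public final class ReadLoopDemo {
    public static void main(String[] args) throws IOException {
        String csv = "line one\n\nline after a blank line\n";

        // Old-style loop: stops at the first empty line, so the last line is never read.
        // (isNotBlank would also stop on whitespace-only lines; isEmpty approximates that here.)
        try (BufferedReader in = new BufferedReader(new StringReader(csv))) {
            String line;
            while ((line = in.readLine()) != null && !line.isEmpty()) {
                System.out.println("old loop read: " + line);
            }
        }

        // New-style loop: reads every line until end of input.
        try (BufferedReader in = new BufferedReader(new StringReader(csv))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println("new loop read: " + line);
            }
        }
    }
}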

View File

@@ -34,6 +34,8 @@ public class MetadataExport
protected ItemService itemService;
protected Context context;
/** Whether to export all metadata, or just normally edited metadata */
protected boolean exportAll;
@@ -55,6 +57,7 @@ public class MetadataExport
// Store the export settings
this.toExport = toExport;
this.exportAll = exportAll;
this.context = c;
}
/**
@@ -73,6 +76,7 @@ public class MetadataExport
// Try to export the community
this.toExport = buildFromCommunity(c, toExport, 0);
this.exportAll = exportAll;
this.context = c;
}
catch (SQLException sqle)
{
@@ -144,13 +148,19 @@ public class MetadataExport
{
try
{
Context.Mode originalMode = context.getCurrentMode();
context.setMode(Context.Mode.READ_ONLY);
// Process each item
DSpaceCSV csv = new DSpaceCSV(exportAll);
while (toExport.hasNext())
{
csv.addItem(toExport.next());
Item item = toExport.next();
csv.addItem(item);
context.uncacheEntity(item);
}
context.setMode(originalMode);
// Return the results
return csv;
}
@@ -224,7 +234,7 @@ public class MetadataExport
String filename = line.getOptionValue('f');
// Create a context
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
// The things we'll export

View File

@@ -31,6 +31,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
@@ -122,6 +123,9 @@ public class MetadataImport
// Make the changes
try
{
Context.Mode originalMode = c.getCurrentMode();
c.setMode(Context.Mode.BATCH_EDIT);
// Process each change
for (DSpaceCSVLine line : toImport)
{
@@ -134,11 +138,15 @@ public class MetadataImport
throw new MetadataImportException("'action' not allowed for new items!");
}
WorkspaceItem wsItem = null;
WorkflowItem wfItem = null;
Item item = null;
// Is this a new item?
if (id != null)
{
// Get the item
Item item = itemService.find(c, id);
item = itemService.find(c, id);
if (item == null)
{
throw new MetadataImportException("Unknown item ID " + id);
@@ -345,8 +353,8 @@ public class MetadataImport
// Create the item
String collectionHandle = line.get("collection").get(0);
collection = (Collection) handleService.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = workspaceItemService.create(c, collection, useTemplate);
Item item = wsItem.getItem();
wsItem = workspaceItemService.create(c, collection, useTemplate);
item = wsItem.getItem();
// Add the metadata to the item
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds())
@@ -364,9 +372,9 @@ public class MetadataImport
if(useWorkflow){
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
if (workflowNotify) {
workflowService.start(c, wsItem);
wfItem = workflowService.start(c, wsItem);
} else {
workflowService.startWithoutNotify(c, wsItem);
wfItem = workflowService.startWithoutNotify(c, wsItem);
}
}
else
@@ -394,7 +402,16 @@ public class MetadataImport
// Record the changes
changes.add(whatHasChanged);
}
if (change) {
//only clear cache if changes have been made.
c.uncacheEntity(wsItem);
c.uncacheEntity(wfItem);
c.uncacheEntity(item);
}
}
c.setMode(originalMode);
}
catch (MetadataImportException mie)
{

View File

@@ -91,16 +91,16 @@ public class MetadataImportInvalidHeadingException extends Exception
{
if (type == SCHEMA)
{
return "Unknown metadata schema in row " + column + ": " + badHeading;
return "Unknown metadata schema in column " + column + ": " + badHeading;
} else if (type == ELEMENT)
{
return "Unknown metadata element in row " + column + ": " + badHeading;
return "Unknown metadata element in column " + column + ": " + badHeading;
} else if (type == MISSING)
{
return "Row with missing header: Row " + column;
return "Row with missing header: column " + column;
} else
{
return "Bad metadata declaration in row " + column + ": " + badHeading;
return "Bad metadata declaration in column" + column + ": " + badHeading;
}
}
}
}

View File

@@ -7,36 +7,32 @@
*/
package org.dspace.app.harvest;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.content.Item;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
/**
* Test class for harvested collections.
*
@@ -91,7 +87,7 @@ public class Harvest
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Harvest\n", options);
System.out
.println("\nPING OAI server: Harvest -g -s oai_source -i oai_set_id");
.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out
.println("RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format");
System.out
@@ -162,7 +158,7 @@ public class Harvest
// Instantiate our class
Harvest harvester = new Harvest();
harvester.context = new Context();
harvester.context = new Context(Context.Mode.BATCH_EDIT);
// Check our options
@@ -375,6 +371,8 @@ public class Harvest
Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
collectionService.removeItem(context, collection, item);
context.uncacheEntity(item);
// Dispatch events every 50 items
if (i%50 == 0) {
context.dispatchEvents();

View File

@@ -176,7 +176,7 @@ public class ItemExportCLITool {
System.exit(1);
}
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
if (myType == Constants.ITEM)

View File

@@ -7,44 +7,28 @@
*/
package org.dspace.app.itemexport;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.mail.MessagingException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.core.Email;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;
import javax.mail.MessagingException;
import java.io.*;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
@@ -129,7 +113,9 @@ public class ItemExportServiceImpl implements ItemExportService
}
System.out.println("Exporting item to " + mySequenceNumber);
exportItem(c, i.next(), fullPath, mySequenceNumber, migrate, excludeBitstreams);
Item item = i.next();
exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams);
c.uncacheEntity(item);
mySequenceNumber++;
}
}
@@ -282,7 +268,7 @@ public class ItemExportServiceImpl implements ItemExportService
("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
("date".equals(metadataField.getElement()) && "available".equals(qualifier)) ||
("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) &&
(dcv.getValue() != null && dcv.getValue().startsWith("http://hdl.handle.net/" +
(dcv.getValue() != null && dcv.getValue().startsWith(handleService.getCanonicalPrefix() +
handleService.getPrefix() + "/"))) ||
("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) ||

View File

@@ -189,7 +189,7 @@ public class ItemImportCLITool {
String zipfilename = "";
if (line.hasOption('z')) {
zip = true;
zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
zipfilename = line.getOptionValue('z');
}
//By default assume collections will be given on the command line
@@ -294,7 +294,7 @@ public class ItemImportCLITool {
myloader.setQuiet(isQuiet);
// create a context
Context c = new Context();
Context c = new Context(Context.Mode.BATCH_EDIT);
// find the EPerson, assign to context
EPerson myEPerson = null;

View File

@@ -14,21 +14,6 @@ import gr.ekt.bte.core.TransformationSpec;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bteio.generators.DSpaceOutputGenerator;
import gr.ekt.bteio.loaders.OAIPMHDataLoader;
import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipFile;
import java.util.zip.ZipEntry;
import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.commons.collections.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
import org.apache.commons.io.FileUtils;
@@ -46,18 +31,14 @@ import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.*;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.handle.service.HandleService;
import org.dspace.utils.DSpace;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
@@ -67,6 +48,19 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* Import items into DSpace. The conventional use is upload files by copying
@@ -341,7 +335,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
clist = mycollections;
}
addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
c.uncacheEntity(item);
System.out.println(i + " " + dircontents[i]);
}
}
@@ -414,7 +409,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
handleOut.close();
deleteItem(c, oldItem);
addItem(c, mycollections, sourceDir, newItemName, null, template);
Item newItem = addItem(c, mycollections, sourceDir, newItemName, null, template);
c.uncacheEntity(oldItem);
c.uncacheEntity(newItem);
}
}
@@ -445,6 +442,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Item myitem = itemService.findByIdOrLegacyId(c, itemID);
System.out.println("Deleting item " + itemID);
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}
}
@@ -470,6 +468,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// create workspace item
Item myitem = null;
WorkspaceItem wi = null;
WorkflowItem wfi = null;
if (!isTest)
{
@@ -495,9 +494,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
// Should we send a workflow alert email or not?
if (useWorkflowSendEmail) {
workflowService.start(c, wi);
wfi = workflowService.start(c, wi);
} else {
workflowService.startWithoutNotify(c, wi);
wfi = workflowService.startWithoutNotify(c, wi);
}
// send ID to the mapfile
@@ -553,6 +552,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
mapOut.println(mapOutputString);
}
//Clear intermediary objects from the cache
c.uncacheEntity(wi);
c.uncacheEntity(wfi);
return myitem;
}
@@ -590,6 +593,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
else
{
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}
@@ -712,6 +716,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
value = "";
}
else
{
value = value.trim();
}
// //getElementData(n, "element");
String element = getAttributeValue(n, "element");
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
@@ -733,8 +741,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
qualifier = null;
}
if (!isTest)
// only add metadata if it is no test and there is a real value
if (!isTest && !value.equals(""))
{
itemService.addMetadata(c, i, schema, element, qualifier, language, value);
}

View File

@@ -9,12 +9,18 @@ package org.dspace.app.itemupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FilenameFilter;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -22,6 +28,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -30,6 +37,8 @@ import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
*
@@ -75,6 +84,7 @@ public class ItemUpdate {
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
static
{
@@ -353,15 +363,11 @@ public class ItemUpdate {
pr("ItemUpdate - initializing run on " + (new Date()).toString());
context = new Context();
context = new Context(Context.Mode.BATCH_EDIT);
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
{
HANDLE_PREFIX = "http://hdl.handle.net/";
}
HANDLE_PREFIX = handleService.getCanonicalPrefix();
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
@@ -460,6 +466,7 @@ public class ItemUpdate {
{
Item item = itarch.getItem();
itemService.update(context, item); //need to update before commit
context.uncacheEntity(item);
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;

View File

@@ -44,18 +44,20 @@ public interface FormatFilter
public String getFormatString();
/**
* @return string to describe the newly-generated Bitstream's - how it was
* @return a string describing the newly generated Bitstream; noting how it was
* produced is a good idea
*/
public String getDescription();
/**
* Read the source stream and produce the filtered content.
*
* @param item Item
* @param source
* input stream
* @param verbose verbosity flag
*
* @return result of filter's transformation, written out to a bitstream
* @return result of filter's transformation as a byte stream.
* @throws Exception if error
*/
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
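The getDestinationStream contract documented above is what every media filter implements: read the source bitstream and hand back the derived content as a new InputStream. A minimal hypothetical filter in the style of the filters shown later in this diff (names and behaviour are illustrative only):
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.dspace.content.Item;
public class UpperCaseTextFilter extends MediaFilter
{
    @Override
    public String getFilteredName(String oldFilename) { return oldFilename + ".txt"; }
    @Override
    public String getBundleName() { return "TEXT"; }
    @Override
    public String getFormatString() { return "Text"; }
    @Override
    public String getDescription() { return "Upper-cased extracted text"; }
    @Override
    public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
        throws Exception
    {
        // read the source bitstream, transform it, return the derived stream
        String text = IOUtils.toString(source, "UTF-8").toUpperCase();
        if (verbose)
        {
            System.out.println(text);
        }
        return new ByteArrayInputStream(text.getBytes("UTF-8"));
    }
}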

View File

@@ -46,11 +46,4 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
}
}
public static final String[] PDF = {"Adobe PDF"};
@Override
public String[] getInputMIMETypes()
{
return PDF;
}
}

View File

@@ -23,6 +23,7 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.im4java.core.ConvertCmd;
import org.im4java.core.Info;
import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation;
import org.im4java.process.ProcessStarter;
@@ -34,175 +35,171 @@ import org.dspace.core.ConfigurationManager;
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
* no bigger than. Creates only JPEGs.
*/
public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
{
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static String cmyk_profile;
static String srgb_profile;
static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch(PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription()
{
return bitstreamDescription;
}
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public File getThumbnailFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: "+op);
}
cmd.run(op);
return f2;
}
public File getImageFile(File f, int page, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath()+s);
if (flatten)
{
op.flatten();
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Image Param: "+op);
}
cmd.run(op);
return f2;
}
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception
{
String nsrc = source.getName();
for(Bundle b: itemService.getBundles(item, "THUMBNAIL")) {
for(Bitstream bit: b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc)) continue;
}
}
String description = bit.getDescription();
//If anything other than a generated thumbnail is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item " + item.getHandle() + ". Thumbnail will not be generated. ");
return false;
}
}
return true; //assume that the thumbnail is a custom one
}
@Override
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
public ImageMagickThumbnailFilter() {
}
@Override
public String[] getInputDescriptions()
{
return null;
}
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription() {
return bitstreamDescription;
}
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public File getThumbnailFile(File f, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.autoOrient();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: " + op);
}
cmd.run(op);
return f2;
}
public File getImageFile(File f, int page, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (flatten) {
op.flatten();
}
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath(), true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);
op.profile(srgb_profile);
}
}
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Image Param: " + op);
}
cmd.run(op);
return f2;
}
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose) throws Exception {
String nsrc = source.getName();
for (Bundle b : itemService.getBundles(item, "THUMBNAIL")) {
for (Bitstream bit : b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc))
continue;
}
}
String description = bit.getDescription();
// If anything other than a generated thumbnail
// is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.println(description + " " + nsrc
+ " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc
+ " is replacable.");
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
+ item.getHandle() + ". Thumbnail will not be generated. ");
return false;
}
}
return true; // assume that the thumbnail is a custom one
}
@Override
public String[] getInputExtensions()
{
return ImageIO.getReaderFileSuffixes();
}
}

View File

@@ -7,9 +7,6 @@
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.*;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
@@ -24,6 +21,10 @@ import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
@@ -161,6 +162,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
++processed;
}
// clear item objects from context cache and internal cache
c.uncacheEntity(currentItem);
currentItem = null;
}
}
@@ -313,12 +315,10 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// get bitstream filename, calculate destination filename
String newName = formatFilter.getFilteredName(source.getName());
Bitstream existingBitstream = null; // is there an existing rendition?
Bundle targetBundle = null; // bundle we're modifying
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
// check if destination bitstream exists
Bundle existingBundle = null;
Bitstream existingBitstream = null;
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
if (bundles.size() > 0)
{
// only finds the last match (FIXME?)
@@ -326,14 +326,13 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().equals(newName)) {
targetBundle = bundle;
if (bitstream.getName().trim().equals(newName.trim())) {
existingBundle = bundle;
existingBitstream = bitstream;
}
}
}
}
// if exists and overwrite = false, exit
if (!overWrite && (existingBitstream != null))
{
@@ -351,67 +350,76 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ")");
}
InputStream destStream;
try {
System.out.println("File: " + newName);
destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
System.out.println("File: " + newName);
// start filtering of the bitstream, using try with resource to close all InputStreams properly
try (
// get the source stream
InputStream srcStream = bitstreamService.retrieve(context, source);
// filter the source stream to produce the destination stream
// this is the hard work, check for OutOfMemoryErrors at the end of the try clause.
InputStream destStream = formatFilter.getDestinationStream(item, srcStream, isVerbose);
) {
if (destStream == null) {
if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
}
return false;
}
Bundle targetBundle; // bundle we're modifying
if (bundles.size() < 1)
{
// create new bundle if needed
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
}
else
{
// take the first match, as we already filtered on the correct bundle name
targetBundle = bundles.get(0);
}
// create bitstream to store the filter result
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// set the name, source and description of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Set the format of the bitstream
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);
//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);
//- Determine if this is a public-derivative format
if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
}
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
} catch (OutOfMemoryError oome) {
System.out.println("!!! OutOfMemoryError !!!");
return false;
}
// create new bundle if needed
if (bundles.size() < 1)
{
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
}
else
{
// take the first match
targetBundle = bundles.get(0);
}
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// Now set the format and name of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Find the proper format
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);
//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);
//- Determine if this is a public-derivative format
if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
}
// fixme - set date?
// we are overwriting, so remove old bitstream
if (existingBitstream != null)
{
bundleService.removeBitstream(context, targetBundle, existingBitstream);
bundleService.removeBitstream(context, existingBundle, existingBitstream);
}
if (!isQuiet)
@@ -420,9 +428,6 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
}
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
return true;
}
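Condensed, the rewritten filtering block above reduces to the following try-with-resources shape (identifiers as in the hunk; the derivative-bitstream creation is elided, so this is schematic rather than the committed code):
// schematic only: both streams are closed automatically, even when filtering throws
private boolean filterBitstream(Context context, Item item, Bitstream source,
                                FormatFilter formatFilter, boolean isVerbose) throws Exception
{
    try (InputStream srcStream = bitstreamService.retrieve(context, source);
         InputStream destStream = formatFilter.getDestinationStream(item, srcStream, isVerbose))
    {
        if (destStream == null)
        {
            return false; // filtering was unsuccessful
        }
        // ... create the derivative bitstream from destStream ...
        return true;
    }
    catch (OutOfMemoryError oome)
    {
        // very large sources can exhaust the heap during filtering
        return false;
    }
}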

View File

@@ -0,0 +1,81 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.IOException;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.xmlbeans.XmlException;
import org.dspace.content.Item;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extract flat text from Microsoft Word documents (.doc, .docx).
*/
public class PoiWordFilter
extends MediaFilter
{
private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class);
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
@Override
public String getBundleName()
{
return "TEXT";
}
@Override
public String getFormatString()
{
return "Text";
}
@Override
public String getDescription()
{
return "Extracted text";
}
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
String text;
try
{
// get input stream from bitstream, pass to filter, get string back
POITextExtractor extractor = ExtractorFactory.createExtractor(source);
text = extractor.getText();
}
catch (IOException | OpenXML4JException | XmlException e)
{
System.err.format("Invalid File Format: %s%n", e.getMessage());
LOG.error("Unable to parse the bitstream: ", e);
throw e;
}
// if verbose flag is set, print out extracted text to STDOUT
if (verbose)
{
System.out.println(text);
}
// return the extracted text as a stream.
return new ByteArrayInputStream(text.getBytes());
}
}
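A quick way to see what this filter produces outside DSpace: the Item argument is unused by PoiWordFilter, so null is acceptable here, and sample.docx is a hypothetical local file:
import java.io.FileInputStream;
import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.dspace.app.mediafilter.PoiWordFilter;
public class PoiWordFilterDemo
{
    public static void main(String[] args) throws Exception
    {
        PoiWordFilter filter = new PoiWordFilter();
        try (InputStream source = new FileInputStream("sample.docx");
             InputStream extracted = filter.getDestinationStream(null, source, false))
        {
            System.out.println(IOUtils.toString(extracted, "UTF-8"));
        }
    }
}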

View File

@@ -7,26 +7,14 @@
*/
package org.dspace.app.packager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.packager.PackageDisseminator;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageParameters;
import org.dspace.content.packager.PackageIngester;
import org.dspace.content.packager.PackageParameters;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -36,6 +24,10 @@ import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.workflow.WorkflowException;
import java.io.*;
import java.sql.SQLException;
import java.util.List;
/**
* Command-line interface to the Packager plugin.
* <p>
@@ -331,6 +323,7 @@ public class Packager
//If we are in REPLACE mode
if(pkgParams.replaceModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -394,6 +387,8 @@ public class Packager
//else if normal SUBMIT mode (or basic RESTORE mode -- which is a special type of submission)
else if (myPackager.submit || pkgParams.restoreModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -445,6 +440,8 @@ public class Packager
}// else, if DISSEMINATE mode
else
{
context.setMode(Context.Mode.READ_ONLY);
//retrieve specified package disseminator
PackageDisseminator dip = (PackageDisseminator) pluginService
.getNamedPlugin(PackageDisseminator.class, myPackager.packageType);

View File

@@ -7,28 +7,9 @@
*/
package org.dspace.app.sitemap;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.cli.*;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -42,6 +23,16 @@ import org.dspace.core.LogManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/**
* Command-line utility for generating HTML and Sitemaps.org protocol Sitemaps.
*
@@ -188,7 +179,7 @@ public class GenerateSitemaps
+ "?map=", null);
}
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
List<Community> comms = communityService.findAll(c);
@@ -201,6 +192,8 @@ public class GenerateSitemaps
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(comm);
}
List<Collection> colls = collectionService.findAll(c);
@@ -214,6 +207,8 @@ public class GenerateSitemaps
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(coll);
}
Iterator<Item> allItems = itemService.findAll(c);
@@ -234,6 +229,8 @@ public class GenerateSitemaps
sitemapsOrg.addURL(url, lastMod);
}
c.uncacheEntity(i);
itemCount++;
}

View File

@@ -0,0 +1,112 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import org.apache.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import java.sql.SQLException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
/**
* This comparator orders the files of an item so that the first one
* is the most suitable for use as the citation_pdf_url reported to Google Scholar
*/
public class GoogleBitstreamComparator implements Comparator<Bitstream>{
private final static Logger log = Logger.getLogger(GoogleBitstreamComparator.class);
HashMap<String, Integer> priorityMap = new HashMap<>();
private Context context;
public GoogleBitstreamComparator(Context context, Map<String, String> googleScholarSettings) {
this.context = context;
String[] shortDescriptions;
if (googleScholarSettings.containsKey("citation.prioritized_types")){
shortDescriptions = splitAndTrim(googleScholarSettings.get("citation.prioritized_types"));
} else {
log.warn("Please define citation.prioritized_types in google-metadata.properties");
shortDescriptions = new String[0];
}
int priority = 1;
for(String s: shortDescriptions){
try {
BitstreamFormat format = ContentServiceFactory.getInstance().getBitstreamFormatService().findByShortDescription(context, s);
if (format != null) {
priorityMap.put(format.getMIMEType(), priority);
} else {
log.warn(s + " is not a valid short description, please add it to bitstream-formats.xml");
}
priority++;
} catch (SQLException e){
log.error(e.getMessage());
}
}
}
private String[] splitAndTrim(String toSplit){
if(toSplit != null) {
String[] splittedArray = toSplit.split(",");
for (int i = 0; i < splittedArray.length; i++)
splittedArray[i] = splittedArray[i].trim();
return splittedArray;
}
else {
return new String[0];
}
}
/**
* Compares two bitstreams based on their MIME types; if the MIME types are the same, the larger bitstream comes first.
* See google-metadata.properties to define the order.
* @param b1 first bitstream
* @param b2 second bitstream
* @return a negative value if b1 should be listed before b2, a positive value otherwise
*/
public int compare(Bitstream b1, Bitstream b2) {
int priority1 = getPriorityFromBitstream(b1);
int priority2 = getPriorityFromBitstream(b2);
if(priority1 > priority2){
return 1;
}
else if(priority1 == priority2){
if(b1.getSize() <= b2.getSize()){
return 1;
}
else {
return -1;
}
}
else {
return -1;
}
}
private int getPriorityFromBitstream(Bitstream bitstream) {
try {
String check = bitstream.getFormat(context).getMIMEType();
if (priorityMap.containsKey(bitstream.getFormat(context).getMIMEType())) {
return priorityMap.get(bitstream.getFormat(context).getMIMEType());
} else {
return Integer.MAX_VALUE;
}
} catch (SQLException e) {
log.error(e.getMessage());
return Integer.MAX_VALUE;
}
}
}
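The comparator is consumed a few hunks further down, in GoogleMetadata.findLinkableFulltext, via Collections.sort. In isolation the usage amounts to the sketch below; the prioritized types are example values for google-metadata.properties, not part of the commit:
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dspace.app.util.GoogleBitstreamComparator;
import org.dspace.content.Bitstream;
import org.dspace.core.Context;
public class GoogleOrderingSketch
{
    // sort the ORIGINAL-bundle bitstreams so the preferred full text ends up first
    static void order(Context context, List<Bitstream> bitstreams)
    {
        Map<String, String> settings = new HashMap<>();
        settings.put("citation.prioritized_types", "Adobe PDF, Microsoft Word, RTF");
        Collections.sort(bitstreams, new GoogleBitstreamComparator(context, settings));
    }
}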

View File

@@ -7,38 +7,30 @@
*/
package org.dspace.app.util;
import java.sql.SQLException;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.content.*;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map.Entry;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.jdom.Element;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.SQLException;
import java.util.*;
import java.util.Collection;
import java.util.Map.Entry;
/**
* Configuration and mapping for Google Scholar output metadata
* @author Sands Fish
@@ -60,7 +52,7 @@ public class GoogleMetadata
protected String itemURL;
// Configuration keys and fields
protected static Map<String, String> configuredFields = new HashMap<String, String>();
protected static Map<String, String> googleScholarSettings = new HashMap<String, String>();
// Google field names (e.g. citation_fieldname) and formatted metadata
// values
@@ -128,6 +120,8 @@ public class GoogleMetadata
protected final int ALL_FIELDS_IN_OPTION = 2;
private static GoogleBitstreamComparator googleBitstreamComparator = null;
// Load configured fields from google-metadata.properties
static
{
@@ -181,7 +175,7 @@ public class GoogleMetadata
if (null != name && !name.equals("") && null != field
&& !field.equals(""))
{
configuredFields.put(name.trim(), field.trim());
googleScholarSettings.put(name.trim(), field.trim());
}
}
}
@@ -200,9 +194,9 @@ public class GoogleMetadata
{
log.debug("Google Metadata Configuration Mapping:");
for (String name : configuredFields.keySet())
for (String name : googleScholarSettings.keySet())
{
log.debug(" " + name + " => " + configuredFields.get(name));
log.debug(" " + name + " => " + googleScholarSettings.get(name));
}
}
@@ -221,6 +215,7 @@ public class GoogleMetadata
this.item = item;
this.itemService = ContentServiceFactory.getInstance().getItemService();
itemURL = HandleServiceFactory.getInstance().getHandleService().resolveToURL(context, item.getHandle());
googleBitstreamComparator = new GoogleBitstreamComparator(context, googleScholarSettings);
parseItem();
}
@@ -234,7 +229,7 @@ public class GoogleMetadata
protected boolean addSingleField(String fieldName)
{
String config = configuredFields.get(fieldName);
String config = googleScholarSettings.get(fieldName);
if (null == config || config.equals(""))
{
@@ -738,7 +733,7 @@ public class GoogleMetadata
addSingleField(PATENT_NUMBER);
// Use config value for patent country. Should be a literal.
String countryConfig = configuredFields.get(PATENT_COUNTRY);
String countryConfig = googleScholarSettings.get(PATENT_COUNTRY);
if (null != countryConfig && !countryConfig.trim().equals(""))
{
metadataMappings.put(PATENT_COUNTRY, countryConfig.trim());
@@ -1051,10 +1046,13 @@ public class GoogleMetadata
*/
protected Bitstream findLinkableFulltext(Item item) throws SQLException {
Bitstream bestSoFar = null;
int bitstreamCount = 0;
List<Bundle> contentBundles = itemService.getBundles(item, "ORIGINAL");
for (Bundle bundle : contentBundles) {
List<Bitstream> bitstreams = bundle.getBitstreams();
Collections.sort(bitstreams, googleBitstreamComparator);
for (Bitstream candidate : bitstreams) {
if (candidate.equals(bundle.getPrimaryBitstream())) { // is primary -> use this one
if (isPublic(candidate)) {
@@ -1097,7 +1095,7 @@ public class GoogleMetadata
protected void addAggregateValues(String field, String delimiter)
{
String authorConfig = configuredFields.get(field);
String authorConfig = googleScholarSettings.get(field);
ArrayList<MetadataValue> fields = resolveMetadataFields(authorConfig);
if (null != fields && !fields.isEmpty())
@@ -1125,7 +1123,7 @@ public class GoogleMetadata
*/
protected void addMultipleValues(String FIELD)
{
String fieldConfig = configuredFields.get(FIELD);
String fieldConfig = googleScholarSettings.get(FIELD);
ArrayList<MetadataValue> fields = resolveMetadataFields(fieldConfig);
if (null != fields && !fields.isEmpty())
@@ -1146,7 +1144,7 @@ public class GoogleMetadata
protected boolean itemIsDissertation()
{
String dConfig = configuredFields.get(DISSERTATION_ID);
String dConfig = googleScholarSettings.get(DISSERTATION_ID);
if (null == dConfig || dConfig.trim().equals(""))
{
return false;
@@ -1165,7 +1163,7 @@ public class GoogleMetadata
protected boolean itemIsPatent()
{
String dConfig = configuredFields.get(PATENT_ID);
String dConfig = googleScholarSettings.get(PATENT_ID);
if (null == dConfig || dConfig.trim().equals(""))
{
return false;
@@ -1184,7 +1182,7 @@ public class GoogleMetadata
protected boolean itemIsTechReport()
{
String dConfig = configuredFields.get(TECH_REPORT_ID);
String dConfig = googleScholarSettings.get(TECH_REPORT_ID);
if (null == dConfig || dConfig.trim().equals(""))
{
return false;

View File

@@ -7,16 +7,20 @@
*/
package org.dspace.app.util;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
import org.dspace.app.util.service.MetadataExposureService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
import java.util.*;
/**
* Static utility class to manage configuration for exposure (hiding) of
* certain Item metadata fields.
@@ -65,6 +69,9 @@ public class MetadataExposureServiceImpl implements MetadataExposureService
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
protected ConfigurationService configurationService;
protected MetadataExposureServiceImpl()
{
@@ -132,12 +139,11 @@ public class MetadataExposureServiceImpl implements MetadataExposureService
hiddenElementSets = new HashMap<>();
hiddenElementMaps = new HashMap<>();
Enumeration pne = ConfigurationManager.propertyNames();
while (pne.hasMoreElements())
{
String key = (String)pne.nextElement();
List<String> propertyKeys = configurationService.getPropertyKeys();
for (String key : propertyKeys) {
if (key.startsWith(CONFIG_PREFIX))
{
if (configurationService.getBooleanProperty(key, true)){
String mdField = key.substring(CONFIG_PREFIX.length());
String segment[] = mdField.split("\\.", 3);
@@ -174,6 +180,7 @@ public class MetadataExposureServiceImpl implements MetadataExposureService
}
}
}
}
}
}
}

View File

@@ -1041,6 +1041,7 @@ public class ShibAuthentication implements AuthenticationMethod
*
* This method will not interpret the header value in any way.
*
* This method will return null if the value is empty.
*
* @param request The HTTP request to look for values in.
* @param name The name of the attribute or header
@@ -1064,6 +1065,17 @@ public class ShibAuthentication implements AuthenticationMethod
value = request.getHeader(name.toLowerCase());
if (StringUtils.isEmpty(value))
value = request.getHeader(name.toUpperCase());
// Extra check for an empty attribute value.
// If the value is empty it should not be returned; return null instead.
// This prevents passing an empty value to other methods, stops the authentication process
// and prevents the creation of an 'empty' DSpace EPerson (and its subsequent
// authentication) when autoregister == true.
if (StringUtils.isEmpty(value))
{
log.debug("ShibAuthentication - attribute " + name + " is empty!");
return null;
}
boolean reconvertAttributes =
configurationService.getBooleanProperty(

View File

@@ -47,15 +47,10 @@ public class AuthorityServiceImpl implements AuthorityService{
}
for (AuthorityIndexerInterface indexerInterface : indexers) {
indexerInterface.init(context , item);
while (indexerInterface.hasMore()) {
AuthorityValue authorityValue = indexerInterface.nextValue();
if(authorityValue != null)
indexingService.indexContent(authorityValue, true);
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(context , item);
for (AuthorityValue authorityValue : authorityValues) {
indexingService.indexContent(authorityValue);
}
//Close up
indexerInterface.close();
}
//Commit to our server
indexingService.commit();

View File

@@ -62,7 +62,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
}
@Override
public void indexContent(AuthorityValue value, boolean force) {
public void indexContent(AuthorityValue value) {
SolrInputDocument doc = value.getSolrInputDocument();
try{

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority;
import java.util.List;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public interface SolrAuthorityInterface {
List<AuthorityValue> queryAuthorities(String text, int max);
AuthorityValue queryAuthorityID(String id);
}
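A purely illustrative stub showing the shape of an implementation of this new interface; a real source (for example an ORCID connector) would query an external service in these methods:
package org.dspace.authority;
import java.util.ArrayList;
import java.util.List;
public class StaticAuthoritySource implements SolrAuthorityInterface
{
    @Override
    public List<AuthorityValue> queryAuthorities(String text, int max)
    {
        return new ArrayList<>(); // no external lookup in this sketch
    }
    @Override
    public AuthorityValue queryAuthorityID(String id)
    {
        return null; // nothing to resolve in this sketch
    }
}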

View File

@@ -11,9 +11,13 @@ import org.dspace.authority.AuthorityValue;
import org.apache.log4j.Logger;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityService;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -31,6 +35,7 @@ public class AuthorityIndexClient {
protected static final AuthorityService authorityService = AuthorityServiceFactory.getInstance().getAuthorityService();
protected static final AuthorityIndexingService indexingService = AuthorityServiceFactory.getInstance().getAuthorityIndexingService();
protected static final List<AuthorityIndexerInterface> indexers = AuthorityServiceFactory.getInstance().getAuthorityIndexers();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
public static void main(String[] args) throws Exception {
@@ -53,18 +58,23 @@ public class AuthorityIndexClient {
//Get all our values from the input forms
Map<String, AuthorityValue> toIndexValues = new HashMap<>();
for (AuthorityIndexerInterface indexerInterface : indexers) {
log.info("Initialize " + indexerInterface.getClass().getName());
System.out.println("Initialize " + indexerInterface.getClass().getName());
indexerInterface.init(context, true);
while (indexerInterface.hasMore()) {
AuthorityValue authorityValue = indexerInterface.nextValue();
if(authorityValue != null){
Iterator<Item> allItems = itemService.findAll(context);
Map<String, AuthorityValue> authorityCache = new HashMap<>();
while (allItems.hasNext()) {
Item item = allItems.next();
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(context, item, authorityCache);
for (AuthorityValue authorityValue : authorityValues) {
toIndexValues.put(authorityValue.getId(), authorityValue);
}
context.uncacheEntity(item);
}
//Close up
indexerInterface.close();
}
@@ -74,7 +84,7 @@ public class AuthorityIndexClient {
log.info("Writing new data");
System.out.println("Writing new data");
for(String id : toIndexValues.keySet()){
indexingService.indexContent(toIndexValues.get(id), true);
indexingService.indexContent(toIndexValues.get(id));
indexingService.commit();
}

View File

@@ -14,6 +14,8 @@ import org.dspace.content.Item;
import org.dspace.core.Context;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
/**
*
@@ -24,17 +26,11 @@ import java.sql.SQLException;
*/
public interface AuthorityIndexerInterface {
public void init(Context context, Item item);
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
throws SQLException, AuthorizeException;
public void init(Context context, boolean useCache);
public void init(Context context);
public AuthorityValue nextValue();
public boolean hasMore() throws SQLException, AuthorizeException;
public void close();
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
throws SQLException, AuthorizeException;
public boolean isConfiguredProperly();
}
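The reworked interface above drops the init/hasMore/nextValue iteration in favour of one call per item. The calling convention, as used by AuthorityIndexClient and AuthorityServiceImpl elsewhere in this diff, reduces to the sketch below (class name is illustrative; package names assumed from the DSpace source layout):
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.indexer.AuthorityIndexerInterface;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
public class IndexerUsageSketch
{
    // one getAuthorityValues call per item, with a shared cache so identical
    // metadata values reuse the same authority value
    static void indexAll(Context context, ItemService itemService,
                         AuthorityIndexerInterface indexer) throws Exception
    {
        Map<String, AuthorityValue> cache = new HashMap<>();
        Iterator<Item> items = itemService.findAll(context);
        while (items.hasNext())
        {
            Item item = items.next();
            List<AuthorityValue> values = indexer.getAuthorityValues(context, item, cache);
            // ... hand the values to the AuthorityIndexingService here ...
            context.uncacheEntity(item);
        }
    }
}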

View File

@@ -20,7 +20,7 @@ import org.dspace.authority.AuthorityValue;
public interface AuthorityIndexingService {
public void indexContent(AuthorityValue value, boolean force);
public void indexContent(AuthorityValue value);
public void cleanIndex() throws Exception;

View File

@@ -27,13 +27,13 @@ import java.util.*;
/**
* DSpaceAuthorityIndexer is used in IndexClient, which is called by the AuthorityConsumer and the indexing-script.
* <p>
* An instance of DSpaceAuthorityIndexer is bound to a list of items.
* This can be one item or all items too depending on the init() method.
* <p>
* DSpaceAuthorityIndexer lets you iterate over each metadata value
* for each metadata field defined in dspace.cfg with 'authority.author.indexer.field'
* for each item in the list.
* The DSpaceAuthorityIndexer returns a list of all authority values for a
* given item: one authority value for each metadata value of the fields defined
* in dspace.cfg with 'authority.author.indexer.field'.
* <p>
* You have to call getAuthorityValues for every Item you want to index. You
* can supply an optional cache to save the mapping from a metadata value to
* the newly created authority value for metadata fields without an authority key.
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
@@ -44,23 +44,16 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
private static final Logger log = Logger.getLogger(DSpaceAuthorityIndexer.class);
protected Iterator<Item> itemIterator;
protected Item currentItem;
/**
* The list of metadata fields which are to be indexed *
*/
protected List<String> metadataFields;
protected int currentFieldIndex;
protected int currentMetadataIndex;
protected AuthorityValue nextValue;
protected Context context;
@Autowired(required = true)
protected AuthorityValueService authorityValueService;
@Autowired(required = true)
protected ItemService itemService;
protected boolean useCache;
protected Map<String, AuthorityValue> cache;
@Autowired(required = true)
protected ConfigurationService configurationService;
@@ -76,143 +69,100 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
}
}
@Override
public void init(Context context, Item item) {
ArrayList<Item> itemList = new ArrayList<>();
itemList.add(item);
this.itemIterator = itemList.iterator();
currentItem = this.itemIterator.next();
initialize(context);
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
throws SQLException, AuthorizeException
{
return getAuthorityValues(context, item, null);
}
@Override
public void init(Context context) {
init(context, false);
}
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
throws SQLException, AuthorizeException
{
List<AuthorityValue> values = new ArrayList<>();
@Override
public void init(Context context, boolean useCache) {
try {
this.itemIterator = itemService.findAll(context);
currentItem = this.itemIterator.next();
} catch (SQLException e) {
log.error("Error while retrieving all items in the metadata indexer");
}
initialize(context);
this.useCache = useCache;
}
for (String metadataField : metadataFields) {
List<MetadataValue> metadataValues = itemService.getMetadataByMetadataString(item, metadataField);
for (MetadataValue metadataValue : metadataValues) {
String content = metadataValue.getValue();
String authorityKey = metadataValue.getAuthority();
protected void initialize(Context context) {
this.context = context;
// We only want to update our item IF our UUID is not present
// or if we need to generate one.
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) ||
StringUtils.startsWith(authorityKey, AuthorityValueService.GENERATE);
currentFieldIndex = 0;
currentMetadataIndex = 0;
useCache = false;
cache = new HashMap<>();
}
@Override
public AuthorityValue nextValue() {
return nextValue;
}
@Override
public boolean hasMore() throws SQLException, AuthorizeException {
if (currentItem == null) {
return false;
}
// 1. iterate over the metadata values
String metadataField = metadataFields.get(currentFieldIndex);
List<MetadataValue> values = itemService.getMetadataByMetadataString(currentItem, metadataField);
if (currentMetadataIndex < values.size()) {
prepareNextValue(metadataField, values.get(currentMetadataIndex));
currentMetadataIndex++;
return true;
} else {
// 2. iterate over the metadata fields
if ((currentFieldIndex + 1) < metadataFields.size()) {
currentFieldIndex++;
//Reset our current metadata index since we are moving to another field
currentMetadataIndex = 0;
return hasMore();
} else {
// 3. iterate over the items
if (itemIterator.hasNext()) {
currentItem = itemIterator.next();
//Reset our current field index
currentFieldIndex = 0;
//Reset our current metadata index
currentMetadataIndex = 0;
} else {
currentItem = null;
AuthorityValue value = null;
if (StringUtils.isBlank(authorityKey) && cache != null) {
// This is a value currently without an authority. So query
// the cache, if an authority is found for the exact value.
value = cache.get(content);
}
if (value == null) {
value = getAuthorityValue(context, metadataField, content,authorityKey);
}
if (value != null) {
if (requiresItemUpdate) {
value.updateItem(context, item, metadataValue);
try {
itemService.update(context, item);
}
catch (Exception e) {
log.error("Error creating a metadatavalue's authority", e);
}
}
if (cache != null) {
cache.put(content, value);
}
values.add(value);
}
else {
log.error("Error getting an authority value for " +
"the metadata value \"" + content + "\" " +
"in the field \"" + metadataField + "\" " +
"of the item " + item.getHandle());
}
return hasMore();
}
}
return values;
}
/**
* This method looks at the authority of a metadata.
* This method looks at the authority of a metadata value.
* If the authority can be found in solr, that value is reused.
* Otherwise a new authority value will be generated that will be indexed in solr.
*
* If the authority starts with AuthorityValueGenerator.GENERATE, a specific type of AuthorityValue will be generated.
* Depending on the type this may involve querying an external REST service
*
* @param context Current DSpace context
* @param metadataField Is one of the fields defined in dspace.cfg to be indexed.
* @param value Is one of the values of the given metadataField in one of the items being indexed.
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @param metadataContent Content of the current metadata value.
* @param metadataAuthorityKey Existing authority of the metadata value.
*/
protected void prepareNextValue(String metadataField, MetadataValue value) throws SQLException, AuthorizeException {
private AuthorityValue getAuthorityValue(Context context, String metadataField,
String metadataContent, String metadataAuthorityKey)
{
if (StringUtils.isNotBlank(metadataAuthorityKey) &&
!metadataAuthorityKey.startsWith(AuthorityValueService.GENERATE)) {
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly
// necessary here but it prevents exceptions in solr
nextValue = null;
String content = value.getValue();
String authorityKey = value.getAuthority();
//We only want to update our item IF our UUID is not present or if we need to generate one.
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) || StringUtils.startsWith(authorityKey, AuthorityValueService.GENERATE);
if (StringUtils.isNotBlank(authorityKey) && !authorityKey.startsWith(AuthorityValueService.GENERATE)) {
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly necessary here but it prevents exceptions in solr
nextValue = authorityValueService.findByUID(context, authorityKey);
}
if (nextValue == null && StringUtils.isBlank(authorityKey) && useCache) {
// A metadata without authority is being indexed
// If there is an exact match in the cache, reuse it rather than adding a new one.
AuthorityValue cachedAuthorityValue = cache.get(content);
if (cachedAuthorityValue != null) {
nextValue = cachedAuthorityValue;
AuthorityValue value = authorityValueService.findByUID(context, metadataAuthorityKey);
if (value != null) {
return value;
}
}
if (nextValue == null) {
nextValue = authorityValueService.generate(context, authorityKey, content, metadataField.replaceAll("\\.", "_"));
}
if (nextValue != null && requiresItemUpdate) {
nextValue.updateItem(context, currentItem, value);
try {
itemService.update(context, currentItem);
} catch (Exception e) {
log.error("Error creating a metadatavalue's authority", e);
}
}
if (useCache) {
cache.put(content, nextValue);
}
}
@Override
public void close() {
itemIterator = null;
cache.clear();
return authorityValueService.generate(context, metadataAuthorityKey,
metadataContent, metadataField.replaceAll("\\.", "_"));
}
@Override

View File

@@ -1,86 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.Work;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.orcid.xml.XMLtoWork;
import org.dspace.authority.rest.RestSource;
import org.apache.log4j.Logger;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Orcid extends RestSource {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(Orcid.class);
private static Orcid orcid;
public static Orcid getOrcid() {
if (orcid == null) {
orcid = DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("OrcidSource", Orcid.class);
}
return orcid;
}
private Orcid(String url) {
super(url);
}
public Bio getBio(String id) {
Document bioDocument = restConnector.get(id + "/orcid-bio");
XMLtoBio converter = new XMLtoBio();
Bio bio = converter.convert(bioDocument).get(0);
bio.setOrcid(id);
return bio;
}
public List<Work> getWorks(String id) {
Document document = restConnector.get(id + "/orcid-works");
XMLtoWork converter = new XMLtoWork();
return converter.convert(document);
}
public List<Bio> queryBio(String name, int start, int rows) {
Document bioDocument = restConnector.get("search/orcid-bio?q=" + URLEncoder.encode("\"" + name + "\"") + "&start=" + start + "&rows=" + rows);
XMLtoBio converter = new XMLtoBio();
return converter.convert(bioDocument);
}
@Override
public List<AuthorityValue> queryAuthorities(String text, int max) {
List<Bio> bios = queryBio(text, 0, max);
List<AuthorityValue> authorities = new ArrayList<AuthorityValue>();
for (Bio bio : bios) {
authorities.add(OrcidAuthorityValue.create(bio));
}
return authorities;
}
@Override
public AuthorityValue queryAuthorityID(String id) {
Bio bio = getBio(id);
return OrcidAuthorityValue.create(bio);
}
}

View File

@@ -1,320 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueServiceImpl;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.BioExternalIdentifier;
import org.dspace.authority.orcid.model.BioName;
import org.dspace.authority.orcid.model.BioResearcherUrl;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import java.util.*;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class OrcidAuthorityValue extends PersonAuthorityValue {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(OrcidAuthorityValue.class);
private String orcid_id;
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
private boolean update; // used in setValues(Bio bio)
/**
* Creates an instance of OrcidAuthorityValue with only uninitialized fields.
* This is meant to be filled in with values from an existing record.
* To create a brand new OrcidAuthorityValue, use create()
*/
public OrcidAuthorityValue() {
}
public OrcidAuthorityValue(SolrDocument document) {
super(document);
}
public String getOrcid_id() {
return orcid_id;
}
public void setOrcid_id(String orcid_id) {
this.orcid_id = orcid_id;
}
public Map<String, List<String>> getOtherMetadata() {
return otherMetadata;
}
public void addOtherMetadata(String label, String data) {
List<String> strings = otherMetadata.get(label);
if (strings == null) {
strings = new ArrayList<String>();
}
strings.add(data);
otherMetadata.put(label, strings);
}
@Override
public SolrInputDocument getSolrInputDocument() {
SolrInputDocument doc = super.getSolrInputDocument();
if (StringUtils.isNotBlank(getOrcid_id())) {
doc.addField("orcid_id", getOrcid_id());
}
for (String t : otherMetadata.keySet()) {
List<String> data = otherMetadata.get(t);
for (String data_entry : data) {
doc.addField("label_" + t, data_entry);
}
}
return doc;
}
@Override
public void setValues(SolrDocument document) {
super.setValues(document);
this.orcid_id = String.valueOf(document.getFieldValue("orcid_id"));
otherMetadata = new HashMap<String, List<String>>();
for (String fieldName : document.getFieldNames()) {
String labelPrefix = "label_";
if (fieldName.startsWith(labelPrefix)) {
String label = fieldName.substring(labelPrefix.length());
List<String> list = new ArrayList<String>();
Collection<Object> fieldValues = document.getFieldValues(fieldName);
for (Object o : fieldValues) {
list.add(String.valueOf(o));
}
otherMetadata.put(label, list);
}
}
}
public static OrcidAuthorityValue create() {
OrcidAuthorityValue orcidAuthorityValue = new OrcidAuthorityValue();
orcidAuthorityValue.setId(UUID.randomUUID().toString());
orcidAuthorityValue.updateLastModifiedDate();
orcidAuthorityValue.setCreationDate(new Date());
return orcidAuthorityValue;
}
/**
* Create an authority based on a given orcid bio
* @param bio Bio
* @return OrcidAuthorityValue
*/
public static OrcidAuthorityValue create(Bio bio) {
OrcidAuthorityValue authority = OrcidAuthorityValue.create();
authority.setValues(bio);
return authority;
}
public boolean setValues(Bio bio) {
BioName name = bio.getName();
if (updateValue(bio.getOrcid(), getOrcid_id())) {
setOrcid_id(bio.getOrcid());
}
if (updateValue(name.getFamilyName(), getLastName())) {
setLastName(name.getFamilyName());
}
if (updateValue(name.getGivenNames(), getFirstName())) {
setFirstName(name.getGivenNames());
}
if (StringUtils.isNotBlank(name.getCreditName())) {
if (!getNameVariants().contains(name.getCreditName())) {
addNameVariant(name.getCreditName());
update = true;
}
}
for (String otherName : name.getOtherNames()) {
if (!getNameVariants().contains(otherName)) {
addNameVariant(otherName);
update = true;
}
}
if (updateOtherMetadata("country", bio.getCountry())) {
addOtherMetadata("country", bio.getCountry());
}
for (String keyword : bio.getKeywords()) {
if (updateOtherMetadata("keyword", keyword)) {
addOtherMetadata("keyword", keyword);
}
}
for (BioExternalIdentifier externalIdentifier : bio.getBioExternalIdentifiers()) {
if (updateOtherMetadata("external_identifier", externalIdentifier.toString())) {
addOtherMetadata("external_identifier", externalIdentifier.toString());
}
}
for (BioResearcherUrl researcherUrl : bio.getResearcherUrls()) {
if (updateOtherMetadata("researcher_url", researcherUrl.toString())) {
addOtherMetadata("researcher_url", researcherUrl.toString());
}
}
if (updateOtherMetadata("biography", bio.getBiography())) {
addOtherMetadata("biography", bio.getBiography());
}
setValue(getName());
if (update) {
update();
}
boolean result = update;
update = false;
return result;
}
private boolean updateOtherMetadata(String label, String data) {
List<String> strings = getOtherMetadata().get(label);
boolean update;
if (strings == null) {
update = StringUtils.isNotBlank(data);
} else {
update = !strings.contains(data);
}
if (update) {
this.update = true;
}
return update;
}
private boolean updateValue(String incoming, String resident) {
boolean update = StringUtils.isNotBlank(incoming) && !incoming.equals(resident);
if (update) {
this.update = true;
}
return update;
}
@Override
public Map<String, String> choiceSelectMap() {
Map<String, String> map = super.choiceSelectMap();
map.put("orcid", getOrcid_id());
return map;
}
@Override
public String getAuthorityType() {
return "orcid";
}
@Override
public String generateString() {
String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() + AuthorityValueServiceImpl.SPLIT;
if (StringUtils.isNotBlank(getOrcid_id())) {
generateString += getOrcid_id();
}
return generateString;
}
@Override
public AuthorityValue newInstance(String info) {
AuthorityValue authorityValue = null;
if (StringUtils.isNotBlank(info)) {
Orcid orcid = Orcid.getOrcid();
authorityValue = orcid.queryAuthorityID(info);
} else {
authorityValue = OrcidAuthorityValue.create();
}
return authorityValue;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return orcid_id != null ? orcid_id.hashCode() : 0;
}
@Override
public boolean hasTheSameInformationAs(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.hasTheSameInformationAs(o)) {
return false;
}
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
for (String key : otherMetadata.keySet()) {
if(otherMetadata.get(key) != null){
List<String> metadata = otherMetadata.get(key);
List<String> otherMetadata = that.otherMetadata.get(key);
if (otherMetadata == null) {
return false;
} else {
HashSet<String> metadataSet = new HashSet<String>(metadata);
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
if (!metadataSet.equals(otherMetadataSet)) {
return false;
}
}
}else{
if(that.otherMetadata.get(key) != null){
return false;
}
}
}
return true;
}
}
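The private update flag above means setValues(Bio) only reports a change (and refreshes lastModified) when the incoming Bio actually differs from what is already stored. A minimal sketch of that behaviour (editor's illustration, not part of the changeset), assuming the pre-6.3 dspace-api classes are on the classpath; the ORCID iD is the Josiah Carberry example iD from the ORCID documentation:

import org.dspace.authority.orcid.OrcidAuthorityValue;
import org.dspace.authority.orcid.model.Bio;

public class OrcidDirtyTrackingSketch {
    public static void main(String[] args) {
        Bio bio = new Bio();
        bio.setOrcid("0000-0002-1825-0097"); // example iD; any well-formed iD would do here
        bio.getName().setGivenNames("Josiah");
        bio.getName().setFamilyName("Carberry");

        OrcidAuthorityValue value = OrcidAuthorityValue.create(bio);
        System.out.println(value.setValues(bio));   // false: nothing changed, lastModified untouched

        bio.setBiography("Professor of psychoceramics.");
        System.out.println(value.setValues(bio));   // true: the new biography was recorded
    }
}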

View File

@@ -0,0 +1,185 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.rest.RESTConnector;
import org.json.JSONObject;
import org.orcid.jaxb.model.record_v2.Person;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
* This class contains all methods for retrieving "Person" objects by calling the ORCID (version 2) endpoints.
* Additionally, it can create AuthorityValues based on the returned Person objects.
*/
public class Orcidv2 implements SolrAuthorityInterface {
private static Logger log = Logger.getLogger(Orcidv2.class);
public RESTConnector restConnector;
private String OAUTHUrl;
private String clientId;
private String clientSecret;
private String accessToken;
/**
* Initialize the accessToken that is required for all subsequent calls to ORCID
*/
public void init() throws IOException {
if (StringUtils.isNotBlank(clientId) && StringUtils.isNotBlank(clientSecret)) {
String authenticationParameters = "?client_id=" + clientId + "&client_secret=" + clientSecret + "&scope=/read-public&grant_type=client_credentials";
HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
httpPost.addHeader("Accept", "application/json");
httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");
HttpClient httpClient = HttpClientBuilder.create().build();
HttpResponse getResponse = httpClient.execute(httpPost);
InputStream is = getResponse.getEntity().getContent();
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
JSONObject responseObject = null;
String inputStr;
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
try {
responseObject = new JSONObject(inputStr);
} catch (Exception e) {
//Not as valid as I'd hoped, move along
responseObject = null;
}
}
}
if (responseObject != null && responseObject.has("access_token")) {
accessToken = (String) responseObject.get("access_token");
}
}
}
/**
* Creates an instance of the Orcidv2 class based on the provided parameters.
* This constructor is called during Spring bean initialization.
*/
private Orcidv2(String url, String OAUTHUrl, String clientId, String clientSecret) {
this.restConnector = new RESTConnector(url);
this.OAUTHUrl = OAUTHUrl;
this.clientId = clientId;
this.clientSecret = clientSecret;
}
/**
* Creates an instance of the Orcidv2 class based on the provided parameters.
* This constructor is called during Spring bean initialization.
*/
private Orcidv2(String url) {
this.restConnector = new RESTConnector(url);
}
/**
* Queries the ORCID API for persons matching the given search string and converts each result into an AuthorityValue.
* @param text search string
* @param max maximum number of results to return
* @return List<AuthorityValue>
*/
@Override
public List<AuthorityValue> queryAuthorities(String text, int max) {
List<Person> bios = queryBio(text, max);
List<AuthorityValue> result = new ArrayList<>();
for (Person person : bios) {
AuthorityValue orcidAuthorityValue = Orcidv2AuthorityValue.create(person);
if (orcidAuthorityValue != null) {
result.add(orcidAuthorityValue);
}
}
return result;
}
/**
* Create an AuthorityValue from a Person retrieved using the given orcid identifier.
* @param id orcid identifier
* @return AuthorityValue
*/
public AuthorityValue queryAuthorityID(String id) {
Person person = getBio(id);
AuthorityValue valueFromPerson = Orcidv2AuthorityValue.create(person);
return valueFromPerson;
}
/**
* Retrieve a Person object based on a given orcid identifier
* @param id orcid identifier
* @return Person
*/
public Person getBio(String id) {
log.debug("getBio called with ID=" + id);
if(!isValid(id)){
return null;
}
InputStream bioDocument = restConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
XMLtoBio converter = new XMLtoBio();
Person person = converter.convertSinglePerson(bioDocument);
return person;
}
/**
* Retrieve a list of Person objects.
* @param text search string
* @param start offset to use
* @param rows how many rows to return
* @return List<Person>
*/
public List<Person> queryBio(String text, int start, int rows) {
if (rows > 100) {
throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
}
String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
InputStream bioDocument = restConnector.get(searchPath, accessToken);
XMLtoBio converter = new XMLtoBio();
List<Person> bios = converter.convert(bioDocument);
return bios;
}
/**
* Retrieve a list of Person objects.
* @param text search string
* @param max how many rows to return
* @return List<Person>
*/
public List<Person> queryBio(String text, int max) {
return queryBio(text, 0, max);
}
/**
* Check to see if the provided text has the correct ORCID syntax.
* Since lookups are only done by ORCID iD, this filters out queries that would return an empty result anyway.
*/
private boolean isValid(String text) {
return StringUtils.isNotBlank(text) && text.matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX);
}
}
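Both constructors above are private, so callers obtain the connector as the Spring-managed "AuthoritySource" bean, the same lookup used by Orcidv2AuthorityValue.newInstance() and XMLtoBio further down. A minimal usage sketch (editor's illustration, not part of the changeset), assuming the bean is defined in the authority Spring configuration with a public API URL, OAuth URL, client id and client secret:

import java.util.List;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.orcid.Orcidv2;
import org.dspace.utils.DSpace;

public class OrcidLookupSketch {
    public static void main(String[] args) throws Exception {
        Orcidv2 orcid = new DSpace().getServiceManager()
                .getServiceByName("AuthoritySource", Orcidv2.class);
        orcid.init(); // fetches the client-credentials access token when id/secret are configured

        // Free-text search; queryBio() rejects more than 100 rows per call
        List<AuthorityValue> matches = orcid.queryAuthorities("Carberry", 10);
        for (AuthorityValue match : matches) {
            System.out.println(match.generateString());
        }

        // Direct lookup by ORCID iD (must match Orcidv2AuthorityValue.ORCID_ID_SYNTAX)
        AuthorityValue single = orcid.queryAuthorityID("0000-0002-1825-0097");
        System.out.println(single != null ? single.getValue() : "not found");
    }
}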

View File

@@ -0,0 +1,330 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueServiceImpl;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.utils.DSpace;
import org.orcid.jaxb.model.common_v2.ExternalId;
import org.orcid.jaxb.model.record_v2.*;
import java.util.*;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public class Orcidv2AuthorityValue extends PersonAuthorityValue {
/*
* The ORCID identifier
*/
private String orcid_id;
/*
* Map containing key-value pairs filled in by "setValues(Person person)".
* This represents all dynamic information of the object.
*/
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
/**
* The syntax that the ORCID id needs to conform to
*/
public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";
/**
* Creates an instance of Orcidv2AuthorityValue with only uninitialized fields.
* This is meant to be filled in with values from an existing record.
* To create a brand new Orcidv2AuthorityValue, use create()
*/
public Orcidv2AuthorityValue() {
}
public Orcidv2AuthorityValue(SolrDocument document) {
super(document);
}
public String getOrcid_id() {
return orcid_id;
}
public void setOrcid_id(String orcid_id) {
this.orcid_id = orcid_id;
}
/**
* Create an empty authority.
* @return Orcidv2AuthorityValue
*/
public static Orcidv2AuthorityValue create() {
Orcidv2AuthorityValue orcidAuthorityValue = new Orcidv2AuthorityValue();
orcidAuthorityValue.setId(UUID.randomUUID().toString());
orcidAuthorityValue.updateLastModifiedDate();
orcidAuthorityValue.setCreationDate(new Date());
return orcidAuthorityValue;
}
/**
* Create an authority based on a given ORCID Person.
* @param person Person
* @return Orcidv2AuthorityValue
*/
public static Orcidv2AuthorityValue create(Person person) {
if (person == null) {
return null;
}
Orcidv2AuthorityValue authority = Orcidv2AuthorityValue.create();
authority.setValues(person);
return authority;
}
/**
* Initialize this instance based on a Person object
* @param person Person
*/
protected void setValues(Person person) {
NameType name = person.getName();
if (!StringUtils.equals(name.getPath(), this.getOrcid_id())) {
this.setOrcid_id(name.getPath());
}
if (!StringUtils.equals(name.getFamilyName().getValue(), this.getLastName())) {
this.setLastName(name.getFamilyName().getValue());
}
if (!StringUtils.equals(name.getGivenNames().getValue(), this.getFirstName())) {
this.setFirstName(name.getGivenNames().getValue());
}
if (name.getCreditName() != null && StringUtils.isNotBlank(name.getCreditName().getValue())) {
if (!this.getNameVariants().contains(name.getCreditName().getValue())) {
this.addNameVariant(name.getCreditName().getValue());
}
}
if (person.getKeywords() != null) {
for (KeywordType keyword : person.getKeywords().getKeyword()) {
if (this.isNewMetadata("keyword", keyword.getContent())) {
this.addOtherMetadata("keyword", keyword.getContent());
}
}
}
ExternalIdentifiers externalIdentifiers = person.getExternalIdentifiers();
if (externalIdentifiers != null) {
for (ExternalId externalIdentifier : externalIdentifiers.getExternalIdentifier()) {
if (this.isNewMetadata("external_identifier", externalIdentifier.getExternalIdValue())) {
this.addOtherMetadata("external_identifier", externalIdentifier.getExternalIdValue());
}
}
}
if (person.getResearcherUrls() != null) {
for (ResearcherUrlType researcherUrl : person.getResearcherUrls().getResearcherUrl()) {
if (this.isNewMetadata("researcher_url", researcherUrl.getUrl().getValue())) {
this.addOtherMetadata("researcher_url", researcherUrl.getUrl().getValue());
}
}
}
if (person.getBiography() != null) {
if (this.isNewMetadata("biography", person.getBiography().getContent())) {
this.addOtherMetadata("biography", person.getBiography().getContent());
}
}
this.setValue(this.getName());
}
/**
* Makes an instance of the AuthorityValue with the given information.
* @param info string info
* @return AuthorityValue
*/
@Override
public AuthorityValue newInstance(String info) {
AuthorityValue authorityValue = null;
if (StringUtils.isNotBlank(info)) {
Orcidv2 orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
authorityValue = orcid.queryAuthorityID(info);
} else {
authorityValue = this.create();
}
return authorityValue;
}
@Override
public void setValue(String value) {
super.setValue(value);
}
/**
* Check whether the provided label / data pair is not yet present in the otherMetadata map.
*/
public boolean isNewMetadata(String label, String data) {
List<String> strings = getOtherMetadata().get(label);
boolean update;
if (strings == null) {
update = StringUtils.isNotBlank(data);
} else {
update = !strings.contains(data);
}
return update;
}
/**
* Add additional metadata to the otherMetadata map.
*/
public void addOtherMetadata(String label, String data) {
List<String> strings = otherMetadata.get(label);
if (strings == null) {
strings = new ArrayList<>();
}
strings.add(data);
otherMetadata.put(label, strings);
}
public Map<String, List<String>> getOtherMetadata() {
return otherMetadata;
}
/**
* Generate a solr record from this instance
* @return SolrInputDocument
*/
@Override
public SolrInputDocument getSolrInputDocument() {
SolrInputDocument doc = super.getSolrInputDocument();
if (StringUtils.isNotBlank(getOrcid_id())) {
doc.addField("orcid_id", getOrcid_id());
}
for (String t : otherMetadata.keySet()) {
List<String> data = otherMetadata.get(t);
for (String data_entry : data) {
doc.addField("label_" + t, data_entry);
}
}
return doc;
}
/**
* Information that can be used in the choice UI.
* @return map
*/
@Override
public Map<String, String> choiceSelectMap() {
Map<String, String> map = super.choiceSelectMap();
String orcid_id = getOrcid_id();
if (StringUtils.isNotBlank(orcid_id)) {
map.put("orcid", orcid_id);
}
return map;
}
@Override
public String getAuthorityType() {
return "orcid";
}
/**
* Provides a string that allows this AuthorityType to be recognized and that contains the information needed to create a new instance using newInstance(String info).
* @return see {@link org.dspace.authority.service.AuthorityValueService#GENERATE AuthorityValueService.GENERATE}
*/
@Override
public String generateString() {
String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() + AuthorityValueServiceImpl.SPLIT;
if (StringUtils.isNotBlank(getOrcid_id())) {
generateString += getOrcid_id();
}
return generateString;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return orcid_id != null ? orcid_id.hashCode() : 0;
}
/**
* The regular equals() only checks if both AuthorityValues describe the same authority.
* This method checks whether the AuthorityValues contain different information.
* It is used, for example, to decide when lastModified should be updated.
* @param o object
* @return true or false
*/
@Override
public boolean hasTheSameInformationAs(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.hasTheSameInformationAs(o)) {
return false;
}
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
for (String key : otherMetadata.keySet()) {
if (otherMetadata.get(key) != null) {
List<String> metadata = otherMetadata.get(key);
List<String> otherMetadata = that.otherMetadata.get(key);
if (otherMetadata == null) {
return false;
} else {
HashSet<String> metadataSet = new HashSet<String>(metadata);
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
if (!metadataSet.equals(otherMetadataSet)) {
return false;
}
}
} else {
if (that.otherMetadata.get(key) != null) {
return false;
}
}
}
return true;
}
}
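Two details of the class above are easy to get wrong when debugging: the iD syntax enforced by ORCID_ID_SYNTAX, and the "will be generated" string produced by generateString(), which is what ends up in item metadata before the authority is resolved. A short sketch (editor's illustration, not part of the changeset); the exact GENERATE/SPLIT literals come from AuthorityValueServiceImpl and are assumed here, so check your tree if they differ:

import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.orcid.Orcidv2AuthorityValue;

public class Orcidv2ValueSketch {
    public static void main(String[] args) {
        // iD syntax accepted by isValid() in Orcidv2 / ORCID_ID_SYNTAX here
        System.out.println("0000-0002-1825-0097".matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX)); // true
        System.out.println("0000-0002-1825-097X".matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX)); // true (X check digit)
        System.out.println("not-an-orcid".matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX));        // false

        Orcidv2AuthorityValue value = Orcidv2AuthorityValue.create();
        value.setOrcid_id("0000-0002-1825-0097");
        value.addOtherMetadata("keyword", "psychoceramics"); // indexed as Solr field "label_keyword"

        SolrInputDocument doc = value.getSolrInputDocument();
        System.out.println(doc.getFieldValue("orcid_id"));
        System.out.println(value.generateString()); // assumed form: "will be generated::orcid::0000-0002-1825-0097"
    }
}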

View File

@@ -1,113 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.LinkedHashSet;
import java.util.Set;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Bio {
protected String orcid;
protected BioName name;
protected String country;
protected Set<String> keywords;
protected Set<BioExternalIdentifier> bioExternalIdentifiers;
protected Set<BioResearcherUrl> researcherUrls;
protected String biography;
public Bio() {
this.name = new BioName();
keywords = new LinkedHashSet<String>();
bioExternalIdentifiers = new LinkedHashSet<BioExternalIdentifier>();
researcherUrls = new LinkedHashSet<BioResearcherUrl>();
}
public String getOrcid() {
return orcid;
}
public void setOrcid(String orcid) {
this.orcid = orcid;
}
public BioName getName() {
return name;
}
public void setName(BioName name) {
this.name = name;
}
public String getCountry() {
return country;
}
public void setCountry(String country) {
this.country = country;
}
public Set<String> getKeywords() {
return keywords;
}
public void addKeyword(String keyword) {
this.keywords.add(keyword);
}
public Set<BioExternalIdentifier> getBioExternalIdentifiers() {
return bioExternalIdentifiers;
}
public void addExternalIdentifier(BioExternalIdentifier externalReference) {
bioExternalIdentifiers.add(externalReference);
}
public Set<BioResearcherUrl> getResearcherUrls() {
return researcherUrls;
}
public void addResearcherUrl(BioResearcherUrl researcherUrl) {
researcherUrls.add(researcherUrl);
}
public String getBiography() {
return biography;
}
public void setBiography(String biography) {
this.biography = biography;
}
@Override
public String toString() {
return "Bio{" +
"orcid='" + orcid + '\'' +
", name=" + name +
", country='" + country + '\'' +
", keywords=" + keywords +
", bioExternalIdentifiers=" + bioExternalIdentifiers +
", researcherUrls=" + researcherUrls +
", biography='" + biography + '\'' +
'}';
}
}

View File

@@ -1,109 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioExternalIdentifier {
protected String id_orcid;
protected String id_common_name;
protected String id_reference;
protected String id_url;
public BioExternalIdentifier(String id_orcid, String id_common_name, String id_reference, String id_url) {
this.id_orcid = id_orcid;
this.id_common_name = id_common_name;
this.id_reference = id_reference;
this.id_url = id_url;
}
public String getId_orcid() {
return id_orcid;
}
public void setId_orcid(String id_orcid) {
this.id_orcid = id_orcid;
}
public String getId_common_name() {
return id_common_name;
}
public void setId_common_name(String id_common_name) {
this.id_common_name = id_common_name;
}
public String getId_reference() {
return id_reference;
}
public void setId_reference(String id_reference) {
this.id_reference = id_reference;
}
public String getId_url() {
return id_url;
}
public void setId_url(String id_url) {
this.id_url = id_url;
}
@Override
public String toString() {
return "BioExternalIdentifier{" +
"id_orcid='" + id_orcid + '\'' +
", id_common_name='" + id_common_name + '\'' +
", id_reference='" + id_reference + '\'' +
", id_url='" + id_url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioExternalIdentifier that = (BioExternalIdentifier) o;
if (id_common_name != null ? !id_common_name.equals(that.id_common_name) : that.id_common_name != null) {
return false;
}
if (id_orcid != null ? !id_orcid.equals(that.id_orcid) : that.id_orcid != null) {
return false;
}
if (id_reference != null ? !id_reference.equals(that.id_reference) : that.id_reference != null) {
return false;
}
if (id_url != null ? !id_url.equals(that.id_url) : that.id_url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = id_orcid != null ? id_orcid.hashCode() : 0;
result = 31 * result + (id_common_name != null ? id_common_name.hashCode() : 0);
result = 31 * result + (id_reference != null ? id_reference.hashCode() : 0);
result = 31 * result + (id_url != null ? id_url.hashCode() : 0);
return result;
}
}

View File

@@ -1,115 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioName {
protected String givenNames;
protected String familyName;
protected String creditName;
protected List<String> otherNames;
BioName() {
otherNames = new ArrayList<String>();
}
BioName(String givenNames, String familyName, String creditName, List<String> otherNames) {
this.givenNames = givenNames;
this.familyName = familyName;
this.creditName = creditName;
this.otherNames = otherNames;
}
public String getGivenNames() {
return givenNames;
}
public void setGivenNames(String givenNames) {
this.givenNames = givenNames;
}
public String getFamilyName() {
return familyName;
}
public void setFamilyName(String familyName) {
this.familyName = familyName;
}
public String getCreditName() {
return creditName;
}
public void setCreditName(String creditName) {
this.creditName = creditName;
}
public List<String> getOtherNames() {
return otherNames;
}
public void setOtherNames(List<String> otherNames) {
this.otherNames = otherNames;
}
@Override
public String toString() {
return "BioName{" +
"givenNames='" + givenNames + '\'' +
", familyName='" + familyName + '\'' +
", creditName='" + creditName + '\'' +
", otherNames=" + otherNames +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioName bioName = (BioName) o;
if (creditName != null ? !creditName.equals(bioName.creditName) : bioName.creditName != null) {
return false;
}
if (familyName != null ? !familyName.equals(bioName.familyName) : bioName.familyName != null) {
return false;
}
if (givenNames != null ? !givenNames.equals(bioName.givenNames) : bioName.givenNames != null) {
return false;
}
if (otherNames != null ? !otherNames.equals(bioName.otherNames) : bioName.otherNames != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = givenNames != null ? givenNames.hashCode() : 0;
result = 31 * result + (familyName != null ? familyName.hashCode() : 0);
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
result = 31 * result + (otherNames != null ? otherNames.hashCode() : 0);
return result;
}
}

View File

@@ -1,78 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioResearcherUrl {
protected String name;
protected String url;
public BioResearcherUrl(String name, String url) {
this.name = name;
this.url = url;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
@Override
public String toString() {
return "BioResearcherUrl{" +
"name='" + name + '\'' +
", url='" + url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioResearcherUrl that = (BioResearcherUrl) o;
if (name != null ? !name.equals(that.name) : that.name != null) {
return false;
}
if (url != null ? !url.equals(that.url) : that.url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (url != null ? url.hashCode() : 0);
return result;
}
}

View File

@@ -1,50 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Citation {
private CitationType type;
private String citation;
public Citation(CitationType type, String citation) {
this.type = type;
this.citation = citation;
}
public CitationType getType() {
return type;
}
public void setType(CitationType type) {
this.type = type;
}
public String getCitation() {
return citation;
}
public void setCitation(String citation) {
this.citation = citation;
}
@Override
public String toString() {
return "Citation{" +
"type=" + type +
", citation='" + citation + '\'' +
'}';
}
}

View File

@@ -1,29 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum CitationType {
FORMATTED_UNSPECIFIED,
BIBTEX,
FORMATTED_APA,
FORMATTED_HARVARD,
FORMATTED_IEEE,
FORMATTED_MLA,
FORMATTED_VANCOUVER,
FORMATTED_CHICAGO
}

View File

@@ -1,111 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Contributor {
private String orcid;
private String creditName;
private String email;
private Set<ContributorAttribute> contributorAttributes;
public Contributor(String orcid, String creditName, String email, Set<ContributorAttribute> contributorAttributes) {
this.orcid = orcid;
this.creditName = creditName;
this.email = email;
this.contributorAttributes = contributorAttributes;
}
public String getOrcid() {
return orcid;
}
public void setOrcid(String orcid) {
this.orcid = orcid;
}
public String getCreditName() {
return creditName;
}
public void setCreditName(String creditName) {
this.creditName = creditName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public Set<ContributorAttribute> getContributorAttributes() {
return contributorAttributes;
}
public void setContributorAttributes(Set<ContributorAttribute> contributorAttributes) {
this.contributorAttributes = contributorAttributes;
}
@Override
public String toString() {
return "Contributor{" +
"orcid='" + orcid + '\'' +
", creditName='" + creditName + '\'' +
", email='" + email + '\'' +
", contributorAttributes=" + contributorAttributes +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Contributor that = (Contributor) o;
if (contributorAttributes != null ? !contributorAttributes.equals(that.contributorAttributes) : that.contributorAttributes != null) {
return false;
}
if (creditName != null ? !creditName.equals(that.creditName) : that.creditName != null) {
return false;
}
if (email != null ? !email.equals(that.email) : that.email != null) {
return false;
}
if (orcid != null ? !orcid.equals(that.orcid) : that.orcid != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = orcid != null ? orcid.hashCode() : 0;
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
result = 31 * result + (email != null ? email.hashCode() : 0);
result = 31 * result + (contributorAttributes != null ? contributorAttributes.hashCode() : 0);
return result;
}
}

View File

@@ -1,79 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class ContributorAttribute {
private ContributorAttributeRole role;
private ContributorAttributeSequence sequence;
public ContributorAttribute(ContributorAttributeRole role, ContributorAttributeSequence sequence) {
this.role = role;
this.sequence = sequence;
}
public ContributorAttributeRole getRole() {
return role;
}
public void setRole(ContributorAttributeRole role) {
this.role = role;
}
public ContributorAttributeSequence getSequence() {
return sequence;
}
public void setSequence(ContributorAttributeSequence sequence) {
this.sequence = sequence;
}
@Override
public String toString() {
return "ContributorAttribute{" +
"role=" + role +
", sequence=" + sequence +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ContributorAttribute that = (ContributorAttribute) o;
if (role != that.role) {
return false;
}
if (sequence != that.sequence) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = role != null ? role.hashCode() : 0;
result = 31 * result + (sequence != null ? sequence.hashCode() : 0);
return result;
}
}

View File

@@ -1,32 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum ContributorAttributeRole {
AUTHOR,
ASSIGNEE,
EDITOR,
CHAIR_OR_TRANSLATOR,
CO_INVESTIGATOR,
CO_INVENTOR,
GRADUATE_STUDENT,
OTHER_INVENTOR,
PRINCIPAL_INVESTIGATOR,
POSTDOCTORAL_RESEARCHER,
SUPPORT_STAFF
}

View File

@@ -1,23 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum ContributorAttributeSequence {
FIRST,
ADDITIONAL
}

View File

@@ -1,117 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Work {
private WorkTitle workTitle;
private String description;
private Citation citation;
private WorkType workType;
private String publicationDate;
private WorkExternalIdentifier workExternalIdentifier;
private String url;
private Set<Contributor> contributors;
private String workSource;
public WorkTitle getWorkTitle() {
return workTitle;
}
public void setWorkTitle(WorkTitle workTitle) {
this.workTitle = workTitle;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Citation getCitation() {
return citation;
}
public void setCitation(Citation citation) {
this.citation = citation;
}
public WorkType getWorkType() {
return workType;
}
public void setWorkType(WorkType workType) {
this.workType = workType;
}
public String getPublicationDate() {
return publicationDate;
}
public void setPublicationDate(String publicationDate) {
this.publicationDate = publicationDate;
}
public WorkExternalIdentifier getWorkExternalIdentifier() {
return workExternalIdentifier;
}
public void setWorkExternalIdentifier(WorkExternalIdentifier workExternalIdentifier) {
this.workExternalIdentifier = workExternalIdentifier;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public Set<Contributor> getContributors() {
return contributors;
}
public void setContributors(Set<Contributor> contributors) {
this.contributors = contributors;
}
public String getWorkSource() {
return workSource;
}
public void setWorkSource(String workSource) {
this.workSource = workSource;
}
@Override
public String toString() {
return "Work{" +
"workTitle=" + workTitle +
", description='" + description + '\'' +
", citation=" + citation +
", workType=" + workType +
", publicationDate='" + publicationDate + '\'' +
", workExternalIdentifier=" + workExternalIdentifier +
", url='" + url + '\'' +
", contributors=" + contributors +
", workSource='" + workSource + '\'' +
'}';
}
}

View File

@@ -1,71 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118807
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class WorkExternalIdentifier {
private WorkExternalIdentifierType workExternalIdentifierType;
private String workExternalIdenfitierID;
public WorkExternalIdentifier(WorkExternalIdentifierType workExternalIdentifierType, String workExternalIdenfitierID) {
this.workExternalIdentifierType = workExternalIdentifierType;
this.workExternalIdenfitierID = workExternalIdenfitierID;
}
public WorkExternalIdentifierType getWorkExternalIdentifierType() {
return workExternalIdentifierType;
}
public void setWorkExternalIdentifierType(WorkExternalIdentifierType workExternalIdentifierType) {
this.workExternalIdentifierType = workExternalIdentifierType;
}
@Override
public String toString() {
return "WorkExternalIdentifier{" +
"workExternalIdentifierType=" + workExternalIdentifierType +
", workExternalIdenfitierID='" + workExternalIdenfitierID + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
WorkExternalIdentifier that = (WorkExternalIdentifier) o;
if (workExternalIdenfitierID != null ? !workExternalIdenfitierID.equals(that.workExternalIdenfitierID) : that.workExternalIdenfitierID != null) {
return false;
}
if (workExternalIdentifierType != that.workExternalIdentifierType) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = workExternalIdentifierType != null ? workExternalIdentifierType.hashCode() : 0;
result = 31 * result + (workExternalIdenfitierID != null ? workExternalIdenfitierID.hashCode() : 0);
return result;
}
}

View File

@@ -1,42 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118807
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum WorkExternalIdentifierType {
// OTHER_ID,
ARXIV,
ASIN,
ASIN_TLD,
BIBCODE,
DOI,
EID,
ISBN,
ISSN,
JFM,
JSTOR,
LCCN,
MR,
OCLC,
OL,
OSTI,
PMC,
PMID,
RFC,
SSRN,
ZBL
}

View File

@@ -1,64 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Map;
/**
* http://support.orcid.org/knowledgebase/articles/118807
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class WorkTitle {
private String title;
private String subtitle;
private Map<String, String> translatedTitles;
public WorkTitle(String title, String subtitle, Map<String, String> translatedTitles) {
this.title = title;
this.subtitle = subtitle;
this.translatedTitles = translatedTitles;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getSubtitle() {
return subtitle;
}
public void setSubtitle(String subtitle) {
this.subtitle = subtitle;
}
public String getTranslatedTitles(String languageCode) {
return translatedTitles.get(languageCode);
}
public void setTranslatedTitle(String languageCode, String translatedTitle) {
translatedTitles.put(languageCode, translatedTitle);
}
@Override
public String toString() {
return "WorkTitle{" +
"title='" + title + '\'' +
", subtitle='" + subtitle + '\'' +
", translatedTitles=" + translatedTitles +
'}';
}
}

View File

@@ -1,57 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118795
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum WorkType {
BOOK,
BOOK_CHAPTER,
BOOK_REVIEW,
DICTIONARY_ENTRY,
DISSERTATION,
ENCYCLOPEDIA_ARTICLE,
EDITED_BOOK,
JOURNAL_ARTICLE,
JOURNAL_ISSUE,
MAGAZINE_ARTICLE,
MANUAL,
ONLINE_RESOURCE,
NEWSLETTER_ARTICLE,
NEWSPAPER_ARTICLE,
REPORT,
RESEARCH_TOOL,
SUPERVISED_STUDENT_PUBLICATION,
TEST,
TRANSLATION,
WEBSITE,
CONFERENCE_ABSTRACT,
CONFERENCE_PAPER,
CONFERENCE_POSTER,
DISCLOSURE,
LICENSE,
PATENT,
REGISTERED_COPYRIGHT,
ARTISTIC_PERFORMANCE,
DATA_SET,
INVENTION,
LECTURE_SPEECH,
RESEARCH_TECHNIQUE,
SPIN_OFF_COMPANY,
STANDARDS_AND_POLICY,
TECHNICAL_STANDARD,
OTHER
}

View File

@@ -8,7 +8,13 @@
package org.dspace.authority.orcid.xml;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import java.io.InputStream;
import java.net.URISyntaxException;
/**
*
@@ -25,11 +31,15 @@ public abstract class Converter<T> {
*/
private static Logger log = Logger.getLogger(Converter.class);
public abstract T convert(InputStream document);
protected void processError(Document xml) {
String errorMessage = XMLErrors.getErrorMessage(xml);
log.error("The orcid-message reports an error: " + errorMessage);
protected Object unmarshall(InputStream input, Class<?> type) throws SAXException, URISyntaxException {
try {
JAXBContext context = JAXBContext.newInstance(type);
Unmarshaller unmarshaller = context.createUnmarshaller();
return unmarshaller.unmarshal(input);
} catch (JAXBException e) {
throw new RuntimeException("Unable to unmarshall orcid message", e);
}
}
public abstract T convert(Document document);
}
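The new unmarshall() helper replaces the old DOM/XPath handling with a plain JAXB round trip into the generated ORCID v2 model classes. A stripped-down equivalent (editor's illustration, not part of the changeset); the file name is made up and the accessors mirror the ones used in Orcidv2AuthorityValue.setValues(Person):

import java.io.FileInputStream;
import java.io.InputStream;
import javax.xml.bind.JAXBContext;
import org.orcid.jaxb.model.record_v2.Person;

public class UnmarshallSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical local copy of a /person response from the ORCID v2 API
        try (InputStream in = new FileInputStream("person-0000-0002-1825-0097.xml")) {
            Person person = (Person) JAXBContext.newInstance(Person.class)
                    .createUnmarshaller()
                    .unmarshal(in);
            System.out.println(person.getName().getGivenNames().getValue());
        }
    }
}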

View File

@@ -1,73 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.xml;
import org.dspace.authority.util.XMLUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import javax.xml.xpath.XPathExpressionException;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class XMLErrors {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(XMLErrors.class);
private static final String ERROR_DESC = "/orcid-message/error-desc";
/**
* Evaluates whether a given xml document contains errors or not.
*
* @param xml The given xml document
* @return true if the given xml document is null
* or if it contains no error description
*/
public static boolean check(Document xml) {
if (xml == null) {
return true;
}
String textContent = null;
try {
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
} catch (XPathExpressionException e) {
log.error("Error while checking for errors in orcid message", e);
}
return textContent == null;
}
public static String getErrorMessage(Document xml) {
if (xml == null) {
return "Did not receive an XML document.";
}
String textContent = null;
try {
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
} catch (XPathExpressionException e) {
log.error("Error while checking for errors in orcid message", e);
}
return textContent;
}
}

View File

@@ -7,23 +7,22 @@
*/
package org.dspace.authority.orcid.xml;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.BioExternalIdentifier;
import org.dspace.authority.orcid.model.BioName;
import org.dspace.authority.orcid.model.BioResearcherUrl;
import org.dspace.authority.util.XMLUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.xpath.XPathExpressionException;
import org.apache.log4j.Logger;
import org.dspace.authority.orcid.Orcidv2;
import org.dspace.utils.DSpace;
import org.orcid.jaxb.model.common_v2.OrcidId;
import org.orcid.jaxb.model.record_v2.Person;
import org.orcid.jaxb.model.search_v2.Result;
import org.orcid.jaxb.model.search_v2.Search;
import org.xml.sax.SAXException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
@@ -36,218 +35,40 @@ public class XMLtoBio extends Converter {
*/
private static Logger log = Logger.getLogger(XMLtoBio.class);
/**
* orcid-message XPATHs
*/
protected String ORCID_BIO = "//orcid-bio";
// protected String ORCID = "parent::*/orcid";
protected String ORCID = "parent::*/orcid-identifier/path";
protected String PERSONAL_DETAILS = "personal-details";
protected String GIVEN_NAMES = PERSONAL_DETAILS + "/given-names";
protected String FAMILY_NAME = PERSONAL_DETAILS + "/family-name";
protected String CREDIT_NAME = PERSONAL_DETAILS + "/credit-name";
protected String OTHER_NAMES = PERSONAL_DETAILS + "/other-names";
protected String OTHER_NAME = OTHER_NAMES + "/other-name";
protected String CONTACT_DETAILS = "contact-details";
protected String COUNTRY = CONTACT_DETAILS + "/address/country";
protected String KEYWORDS = "keywords";
protected String KEYWORD = KEYWORDS + "/keyword";
protected String EXTERNAL_IDENTIFIERS = "external-identifiers";
protected String EXTERNAL_IDENTIFIER = EXTERNAL_IDENTIFIERS + "/external-identifier";
protected String EXTERNAL_ID_ORCID = "external-id-orcid";
protected String EXTERNAL_ID_COMMNON_NAME = "external-id-common-name";
protected String EXTERNAL_ID_REFERENCE = "external-id-reference";
protected String EXTERNAL_ID_URL = "external-id-url";
protected String RESEARCHER_URLS = "researcher-urls";
protected String RESEARCHER_URL = "researcher-urls/researcher-url";
protected String URL_NAME = "url-name";
protected String URL = "url";
protected String BIOGRAPHY = ORCID_BIO + "/biography";
protected String AFFILIATIONS = ORCID_BIO + "/affiliation";
/**
* Regex
*/
protected String ORCID_NOT_FOUND = "ORCID [\\d-]* not found";
@Override
public List<Bio> convert(Document xml) {
List<Bio> result = new ArrayList<Bio>();
if (XMLErrors.check(xml)) {
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, ORCID_BIO);
while (iterator.hasNext()) {
Bio bio = convertBio(iterator.next());
result.add(bio);
}
} catch (XPathExpressionException e) {
log.error("Error in xpath syntax", e);
}
} else {
processError(xml);
}
return result;
}
private Bio convertBio(Node node) {
Bio bio = new Bio();
setOrcid(node,bio);
setPersonalDetails(node, bio);
setContactDetails(node, bio);
setKeywords(node, bio);
setExternalIdentifiers(node, bio);
setResearcherUrls(node, bio);
setBiography(node, bio);
return bio;
}
@Override
protected void processError(Document xml) {
String errorMessage = XMLErrors.getErrorMessage(xml);
if(errorMessage.matches(ORCID_NOT_FOUND))
{
// do something?
}
log.error("The orcid-message reports an error: " + errorMessage);
}
private void setOrcid(Node node, Bio bio) {
public List<Person> convert(InputStream xml) {
List<Person> bios= new ArrayList<>();
try {
String orcid = XMLUtils.getTextContent(node, ORCID);
bio.setOrcid(orcid);
} catch (XPathExpressionException e) {
log.debug("Error in finding the biography in bio xml.", e);
}
}
Orcidv2 connector = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
protected void setBiography(Node xml, Bio bio) {
try {
String biography = XMLUtils.getTextContent(xml, BIOGRAPHY);
bio.setBiography(biography);
} catch (XPathExpressionException e) {
log.error("Error in finding the biography in bio xml.", e);
}
}
protected void setResearcherUrls(Node xml, Bio bio) {
try {
NodeList researcher_urls = XMLUtils.getNodeList(xml, RESEARCHER_URL);
if (researcher_urls != null) {
for (int i = 0; i < researcher_urls.getLength(); i++) {
Node researcher_url = researcher_urls.item(i);
if (researcher_url.getNodeType() != Node.TEXT_NODE) {
String url_name = XMLUtils.getTextContent(researcher_url, URL_NAME);
String url = XMLUtils.getTextContent(researcher_url, URL);
BioResearcherUrl researcherUrl = new BioResearcherUrl(url_name, url);
bio.addResearcherUrl(researcherUrl);
Search search = (Search) unmarshall(xml, Search.class);
for(Result result : search.getResult()){
OrcidId orcidIdentifier = result.getOrcidIdentifier();
if(orcidIdentifier!=null){
log.debug("Found OrcidId=" + orcidIdentifier.toString());
String orcid = orcidIdentifier.getUriPath();
Person bio = connector.getBio(orcid);
if(bio!=null){
bios.add(bio);
}
}
}
} catch (XPathExpressionException e) {
log.error("Error in finding the researcher url in bio xml.", e);
} catch (SAXException | URISyntaxException e) {
log.error(e);
}
return bios;
}
protected void setExternalIdentifiers(Node xml, Bio bio) {
public Person convertSinglePerson(InputStream xml) {
Person person = null;
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, EXTERNAL_IDENTIFIER);
while (iterator.hasNext()) {
Node external_identifier = iterator.next();
String id_orcid = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_ORCID);
String id_common_name = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_COMMNON_NAME);
String id_reference = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_REFERENCE);
String id_url = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_URL);
BioExternalIdentifier externalIdentifier = new BioExternalIdentifier(id_orcid, id_common_name, id_reference, id_url);
bio.addExternalIdentifier(externalIdentifier);
}
} catch (XPathExpressionException e) {
log.error("Error in finding the external identifier in bio xml.", e);
}
}
protected void setKeywords(Node xml, Bio bio) {
try {
NodeList keywords = XMLUtils.getNodeList(xml, KEYWORD);
if (keywords != null) {
for (int i = 0; i < keywords.getLength(); i++) {
String keyword = keywords.item(i).getTextContent();
String[] split = keyword.split(",");
for (String k : split) {
bio.addKeyword(k.trim());
}
}
}
} catch (XPathExpressionException e) {
log.error("Error in finding the keywords in bio xml.", e);
}
}
protected void setContactDetails(Node xml, Bio bio) {
try {
String country = XMLUtils.getTextContent(xml, COUNTRY);
bio.setCountry(country);
} catch (XPathExpressionException e) {
log.error("Error in finding the country in bio xml.", e);
}
}
protected void setPersonalDetails(Node xml, Bio bio) {
BioName name = bio.getName();
try {
String givenNames = XMLUtils.getTextContent(xml, GIVEN_NAMES);
name.setGivenNames(givenNames);
} catch (XPathExpressionException e) {
log.error("Error in finding the given names in bio xml.", e);
}
try {
String familyName = XMLUtils.getTextContent(xml, FAMILY_NAME);
name.setFamilyName(familyName);
} catch (XPathExpressionException e) {
log.error("Error in finding the family name in bio xml.", e);
}
try {
String creditName = XMLUtils.getTextContent(xml, CREDIT_NAME);
name.setCreditName(creditName);
} catch (XPathExpressionException e) {
log.error("Error in finding the credit name in bio xml.", e);
}
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, OTHER_NAME);
while (iterator.hasNext()) {
Node otherName = iterator.next();
String textContent = otherName.getTextContent();
name.getOtherNames().add(textContent.trim());
}
} catch (XPathExpressionException e) {
log.error("Error in finding the other names in bio xml.", e);
person = (Person) unmarshall(xml, Person.class);
return person;
} catch (SAXException | URISyntaxException e) {
log.error(e);
}
return null;
}
}
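Of the two entry points above, convert(InputStream) needs the "AuthoritySource" bean because it re-queries ORCID for every search hit, while convertSinglePerson(InputStream) is self-contained. A minimal sketch of the latter (editor's illustration, not part of the changeset); the file name is made up:

import java.io.FileInputStream;
import java.io.InputStream;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.orcid.jaxb.model.record_v2.Person;

public class XmlToBioSketch {
    public static void main(String[] args) throws Exception {
        XMLtoBio converter = new XMLtoBio();
        try (InputStream in = new FileInputStream("person-response.xml")) { // hypothetical saved response
            Person person = converter.convertSinglePerson(in);
            if (person != null && person.getName() != null) {
                System.out.println(person.getName().getPath()); // the ORCID iD, as read into orcid_id
            }
        }
    }
}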

View File

@@ -1,240 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.xml;
import org.dspace.authority.orcid.model.*;
import org.dspace.authority.util.*;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.xpath.XPathExpressionException;
import java.util.*;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class XMLtoWork extends Converter {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(XMLtoWork.class);
/**
* orcid-message XPATHs
*/
protected String ORCID_WORKS = "//orcid-works";
protected String ORCID_WORK = ORCID_WORKS + "/orcid-work";
protected String WORK_TITLE = "work-title";
protected String TITLE = WORK_TITLE + "/title";
protected String SUBTITLE = WORK_TITLE + "/subtitle";
protected String TRANSLATED_TITLES = WORK_TITLE + "/translated-title";
protected String TRANSLATED_TITLES_LANGUAGE = "@language-code";
protected String SHORT_DESCRIPTION = "short-description";
protected String WORK_CITATION = "work-citation";
protected String CITATION_TYPE = WORK_CITATION + "/work-citation-type";
protected String CITATION = WORK_CITATION + "/citation";
protected String WORK_TYPE = "work-type";
protected String PUBLICATION_DATE = "publication-date";
protected String YEAR = PUBLICATION_DATE + "/year";
protected String MONTH = PUBLICATION_DATE + "/month";
protected String DAY = PUBLICATION_DATE + "/day";
protected String WORK_EXTERNAL_IDENTIFIERS = "work-external-identifiers";
protected String WORK_EXTERNAL_IDENTIFIER = WORK_EXTERNAL_IDENTIFIERS + "/work-external-identifier";
protected String WORK_EXTERNAL_IDENTIFIER_TYPE = "work-external-identifier-type";
protected String WORK_EXTERNAL_IDENTIFIER_ID = "work-external-identifier-id";
protected String URL = "url";
protected String WORK_CONTRIBUTOR = "work-contributors";
protected String CONTRIBUTOR = WORK_CONTRIBUTOR+"/contributor";
protected String CONTRIBUTOR_ORCID = "contributor-orcid";
protected String CREDIT_NAME = "credit-name";
protected String CONTRIBUTOR_EMAIL = "contributor-email";
protected String CONTRIBUTOR_ATTRIBUTES = "contributor-attributes";
protected String CONTRIBUTOR_SEQUENCE = "contributor-sequence";
protected String CONTRIBUTOR_ROLE = "contributor-role";
protected String WORK_SOURCE = "work-source";
@Override
public List<Work> convert(Document document) {
List<Work> result = new ArrayList<Work>();
if (XMLErrors.check(document)) {
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(document, ORCID_WORK);
while (iterator.hasNext()) {
Work work = convertWork(iterator.next());
result.add(work);
}
} catch (XPathExpressionException e) {
log.error("Error in xpath syntax", e);
}
} else {
processError(document);
}
return result;
}
protected Work convertWork(Node node) throws XPathExpressionException {
Work work = new Work();
setTitle(node, work);
setDescription(node, work);
setCitation(node, work);
setWorkType(node, work);
setPublicationDate(node, work);
setExternalIdentifiers(node, work);
setUrl(node, work);
setContributors(node, work);
setWorkSource(node, work);
return work;
}
protected void setWorkSource(Node node, Work work) throws XPathExpressionException {
String workSource = XMLUtils.getTextContent(node, WORK_SOURCE);
work.setWorkSource(workSource);
}
protected void setContributors(Node node, Work work) throws XPathExpressionException {
Set<Contributor> contributors = new HashSet<Contributor>();
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, CONTRIBUTOR);
while (iterator.hasNext()) {
Node nextContributorNode = iterator.next();
String orcid = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_ORCID);
String creditName = XMLUtils.getTextContent(nextContributorNode, CREDIT_NAME);
String email = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_EMAIL);
Set<ContributorAttribute> contributorAttributes = new HashSet<ContributorAttribute>();
NodeList attributeNodes = XMLUtils.getNodeList(nextContributorNode, CONTRIBUTOR_ATTRIBUTES);
Iterator<Node> attributesIterator = XMLUtils.getNodeListIterator(attributeNodes);
while (attributesIterator.hasNext()) {
Node nextAttribute = attributesIterator.next();
String roleText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_ROLE);
ContributorAttributeRole role = EnumUtils.lookup(ContributorAttributeRole.class, roleText);
String sequenceText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_SEQUENCE);
ContributorAttributeSequence sequence = EnumUtils.lookup(ContributorAttributeSequence.class, sequenceText);
ContributorAttribute attribute = new ContributorAttribute(role, sequence);
contributorAttributes.add(attribute);
}
Contributor contributor = new Contributor(orcid, creditName, email, contributorAttributes);
contributors.add(contributor);
}
work.setContributors(contributors);
}
protected void setUrl(Node node, Work work) throws XPathExpressionException {
String url = XMLUtils.getTextContent(node, URL);
work.setUrl(url);
}
protected void setExternalIdentifiers(Node node, Work work) throws XPathExpressionException {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, WORK_EXTERNAL_IDENTIFIER);
while (iterator.hasNext()) {
Node work_external_identifier = iterator.next();
String typeText = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_TYPE);
WorkExternalIdentifierType type = EnumUtils.lookup(WorkExternalIdentifierType.class, typeText);
String id = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_ID);
WorkExternalIdentifier externalID = new WorkExternalIdentifier(type, id);
work.setWorkExternalIdentifier(externalID);
}
}
protected void setPublicationDate(Node node, Work work) throws XPathExpressionException {
String year = XMLUtils.getTextContent(node, YEAR);
String month = XMLUtils.getTextContent(node, MONTH);
String day = XMLUtils.getTextContent(node, DAY);
String publicationDate = year;
if (StringUtils.isNotBlank(month)) {
publicationDate += "-" + month;
if (StringUtils.isNotBlank(day)) {
publicationDate += "-" + day;
}
}
work.setPublicationDate(publicationDate);
}
protected void setWorkType(Node node, Work work) throws XPathExpressionException {
String workTypeText = XMLUtils.getTextContent(node, WORK_TYPE);
WorkType workType = EnumUtils.lookup(WorkType.class, workTypeText);
work.setWorkType(workType);
}
protected void setCitation(Node node, Work work) throws XPathExpressionException {
String typeText = XMLUtils.getTextContent(node, CITATION_TYPE);
CitationType type = EnumUtils.lookup(CitationType.class, typeText);
String citationtext = XMLUtils.getTextContent(node, CITATION);
Citation citation = new Citation(type, citationtext);
work.setCitation(citation);
}
protected void setDescription(Node node, Work work) throws XPathExpressionException {
String description = XMLUtils.getTextContent(node, SHORT_DESCRIPTION);
work.setDescription(description);
}
protected void setTitle(Node node, Work work) throws XPathExpressionException {
String title = XMLUtils.getTextContent(node, TITLE);
String subtitle = XMLUtils.getTextContent(node, SUBTITLE);
Map<String, String> translatedTitles = new HashMap<String, String>();
NodeList nodeList = XMLUtils.getNodeList(node, TRANSLATED_TITLES);
Iterator<Node> iterator = XMLUtils.getNodeListIterator(nodeList);
while (iterator.hasNext()) {
Node languageNode = iterator.next();
String language = XMLUtils.getTextContent(languageNode, TRANSLATED_TITLES_LANGUAGE);
String translated_title = XMLUtils.getTextContent(languageNode, ".");
translatedTitles.put(language, translated_title);
}
WorkTitle workTitle = new WorkTitle(title, subtitle, translatedTitles);
work.setWorkTitle(workTitle);
}
}
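The converter deleted above walks the orcid-message XML with DSpace's XMLUtils/EnumUtils XPath helpers. A minimal sketch of the same extraction pattern using only the JDK's javax.xml.xpath; the sample file name is hypothetical and namespaces are assumed to be absent (which real ORCID messages are not), so treat it purely as an illustration of the XPath constants listed above.

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

public class OrcidWorkXPathSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical local sample of an orcid-message document (no namespaces).
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse("orcid-message-sample.xml");
        XPath xpath = XPathFactory.newInstance().newXPath();
        // Same absolute expression as ORCID_WORK above.
        NodeList works = (NodeList) xpath.evaluate("//orcid-works/orcid-work",
                doc, XPathConstants.NODESET);
        for (int i = 0; i < works.getLength(); i++) {
            Node work = works.item(i);
            // Relative expressions, mirroring WORK_TITLE/TITLE and WORK_TYPE.
            String title = xpath.evaluate("work-title/title", work);
            String type = xpath.evaluate("work-type", work);
            System.out.println(type + ": " + title);
        }
    }
}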

View File

@@ -7,13 +7,12 @@
*/
package org.dspace.authority.rest;
import org.apache.http.impl.client.HttpClientBuilder;
import org.dspace.authority.util.XMLUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import java.io.InputStream;
import java.util.Scanner;
@@ -38,26 +37,27 @@ public class RESTConnector {
this.url = url;
}
public Document get(String path) {
Document document = null;
public InputStream get(String path, String accessToken) {
InputStream result = null;
path = trimSlashes(path);
String fullPath = url + '/' + path;
HttpGet httpGet = new HttpGet(fullPath);
if(StringUtils.isNotBlank(accessToken)){
httpGet.addHeader("Content-Type", "application/vnd.orcid+xml");
httpGet.addHeader("Authorization","Bearer "+accessToken);
}
try {
HttpClient httpClient = HttpClientBuilder.create().build();
HttpResponse getResponse = httpClient.execute(httpGet);
//do not close this httpClient
result = getResponse.getEntity().getContent();
document = XMLUtils.convertStreamToXML(result);
} catch (Exception e) {
getGotError(e, fullPath);
}
return document;
return result;
}
protected void getGotError(Exception e, String fullPath) {

View File

@@ -7,9 +7,7 @@
*/
package org.dspace.authority.rest;
import org.dspace.authority.AuthorityValue;
import java.util.List;
import org.dspace.authority.SolrAuthorityInterface;
/**
*
@@ -18,7 +16,7 @@ import java.util.List;
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public abstract class RestSource {
public abstract class RestSource implements SolrAuthorityInterface {
protected RESTConnector restConnector;
@@ -26,7 +24,4 @@ public abstract class RestSource {
this.restConnector = new RESTConnector(url);
}
public abstract List<AuthorityValue> queryAuthorities(String text, int max);
public abstract AuthorityValue queryAuthorityID(String id);
}

View File

@@ -244,6 +244,12 @@ public class AuthorizeServiceImpl implements AuthorizeService
return true;
}
// If authorization was given before and cached
Boolean cachedResult = c.getCachedAuthorizationResult(o, action, e);
if (cachedResult != null) {
return cachedResult.booleanValue();
}
// is eperson set? if not, userToCheck = null (anonymous)
EPerson userToCheck = null;
if (e != null)
@@ -254,8 +260,9 @@ public class AuthorizeServiceImpl implements AuthorizeService
// if user is an Admin on this object
DSpaceObject adminObject = useInheritance ? serviceFactory.getDSpaceObjectService(o).getAdminObject(c, o, action) : null;
if (isAdmin(c, adminObject))
if (isAdmin(c, e, adminObject))
{
c.cacheAuthorizedAction(o, action, e, true, null);
return true;
}
}
@@ -297,6 +304,11 @@ public class AuthorizeServiceImpl implements AuthorizeService
if (ignoreCustomPolicies
&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
{
if(c.isReadOnly()) {
//When we are in read-only mode, we will cache authorized actions in a different way
//So we remove this resource policy from the cache.
c.uncacheEntity(rp);
}
continue;
}
@@ -305,20 +317,29 @@ public class AuthorizeServiceImpl implements AuthorizeService
{
if (rp.getEPerson() != null && rp.getEPerson().equals(userToCheck))
{
c.cacheAuthorizedAction(o, action, e, true, rp);
return true; // match
}
if ((rp.getGroup() != null)
&& (groupService.isMember(c, rp.getGroup())))
&& (groupService.isMember(c, e, rp.getGroup())))
{
// group was set, and eperson is a member
// of that group
c.cacheAuthorizedAction(o, action, e, true, rp);
return true;
}
}
if(c.isReadOnly()) {
//When we are in read-only mode, we will cache authorized actions in a different way
//So we remove this resource policy from the cache.
c.uncacheEntity(rp);
}
}
// default authorization is denial
c.cacheAuthorizedAction(o, action, e, false, null);
return false;
}
@@ -349,9 +370,14 @@ public class AuthorizeServiceImpl implements AuthorizeService
@Override
public boolean isAdmin(Context c, DSpaceObject o) throws SQLException
{
return this.isAdmin(c, c.getCurrentUser(), o);
}
@Override
public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException
{
// return true if user is an Administrator
if (isAdmin(c))
if (isAdmin(c, e))
{
return true;
}
@@ -361,6 +387,11 @@ public class AuthorizeServiceImpl implements AuthorizeService
return false;
}
Boolean cachedResult = c.getCachedAuthorizationResult(o, Constants.ADMIN, e);
if (cachedResult != null) {
return cachedResult.booleanValue();
}
//
// First, check all Resource Policies directly on this object
//
@@ -371,19 +402,27 @@ public class AuthorizeServiceImpl implements AuthorizeService
// check policies for date validity
if (resourcePolicyService.isDateValid(rp))
{
if (rp.getEPerson() != null && rp.getEPerson().equals(c.getCurrentUser()))
if (rp.getEPerson() != null && rp.getEPerson().equals(e))
{
c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp);
return true; // match
}
if ((rp.getGroup() != null)
&& (groupService.isMember(c, rp.getGroup())))
&& (groupService.isMember(c, e, rp.getGroup())))
{
// group was set, and eperson is a member
// of that group
c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp);
return true;
}
}
if(c.isReadOnly()) {
//When we are in read-only mode, we will cache authorized actions in a different way
//So we remove this resource policy from the cache.
c.uncacheEntity(rp);
}
}
// If user doesn't have specific Admin permissions on this object,
@@ -393,9 +432,12 @@ public class AuthorizeServiceImpl implements AuthorizeService
DSpaceObject parent = serviceFactory.getDSpaceObjectService(o).getParentObject(c, o);
if (parent != null)
{
return isAdmin(c, parent);
boolean admin = isAdmin(c, e, parent);
c.cacheAuthorizedAction(o, Constants.ADMIN, e, admin, null);
return admin;
}
c.cacheAuthorizedAction(o, Constants.ADMIN, e, false, null);
return false;
}
@@ -418,7 +460,23 @@ public class AuthorizeServiceImpl implements AuthorizeService
return groupService.isMember(c, Group.ADMIN);
}
}
@Override
public boolean isAdmin(Context c, EPerson e) throws SQLException
{
// if we're ignoring authorization, user is member of admin
if (c.ignoreAuthorization())
{
return true;
}
if (e == null)
{
return false; // anonymous users can't be admins....
} else
{
return groupService.isMember(c, e, Group.ADMIN);
}
}
public boolean isCommunityAdmin(Context c) throws SQLException
{
EPerson e = c.getCurrentUser();
@@ -624,7 +682,7 @@ public class AuthorizeServiceImpl implements AuthorizeService
List<Group> groups = new ArrayList<Group>();
for (ResourcePolicy resourcePolicy : policies) {
if(resourcePolicy.getGroup() != null)
if(resourcePolicy.getGroup() != null && resourcePolicyService.isDateValid(resourcePolicy))
{
groups.add(resourcePolicy.getGroup());
}
@@ -642,13 +700,14 @@ public class AuthorizeServiceImpl implements AuthorizeService
@Override
public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException
{
return findByTypeIdGroupAction(c, dso, group, action, policyID) != null;
return !resourcePolicyService.findByTypeGroupActionExceptId(c, dso, group, action, policyID).isEmpty();
}
@Override
public ResourcePolicy findByTypeIdGroupAction(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action)
throws SQLException
{
List<ResourcePolicy> policies = resourcePolicyService.find(c, dso, group, action, policyID);
List<ResourcePolicy> policies = resourcePolicyService.find(c, dso, group, action);
if (CollectionUtils.isNotEmpty(policies))
{
@@ -658,7 +717,6 @@ public class AuthorizeServiceImpl implements AuthorizeService
}
}
/**
* Generate READ policies for the given date, adding the supplied reason. New policies are automatically assigned to the groups that
* have rights on the collection. E.g., if anonymous users can access the collection, the policies are assigned to the anonymous group.
@@ -734,12 +792,19 @@ public class AuthorizeServiceImpl implements AuthorizeService
public ResourcePolicy createOrModifyPolicy(ResourcePolicy policy, Context context, String name, Group group, EPerson ePerson,
Date embargoDate, int action, String reason, DSpaceObject dso) throws AuthorizeException, SQLException
{
ResourcePolicy policyTemp = null;
if (policy != null)
{
List<ResourcePolicy> duplicates = resourcePolicyService.findByTypeGroupActionExceptId(context, dso, group, action, policy.getID());
if (!duplicates.isEmpty())
{
policy = duplicates.get(0);
}
} else {
// if an identical policy (same Action and same Group) is already in place modify it...
policyTemp = findByTypeGroupAction(context, dso, group, action);
}
int policyID = -1;
if (policy != null) policyID = policy.getID();
// if an identical policy (same Action and same Group) is already in place modify it...
ResourcePolicy policyTemp = findByTypeIdGroupAction(context, dso, group, action, policyID);
if (policyTemp != null)
{
policy = policyTemp;
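The changes above add per-Context caching of authorization results and EPerson-explicit isAdmin overloads, so admin checks no longer have to go through context.getCurrentUser(). A hedged usage sketch, assuming the usual DSpace 6 service-factory lookup:

import java.sql.SQLException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

public class AdminCheckSketch {
    // Illustrative helper: check another user's admin rights without switching the current user.
    public static boolean isAdminFor(Context context, EPerson user, DSpaceObject dso) throws SQLException {
        AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
        // New overload from this changeset: the EPerson is passed explicitly, and the result is
        // cached on the Context (getCachedAuthorizationResult / cacheAuthorizedAction).
        return authorizeService.isAdmin(context, user, dso);
    }
}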

View File

@@ -218,9 +218,9 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
}
/**
* gets ID for Group referred to by this policy
* gets the Group referred to by this policy
*
* @return groupID, or null if no group set
* @return group, or null if no group set
*/
public Group getGroup()
{
@@ -228,7 +228,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
}
/**
* sets ID for Group referred to by this policy
* sets the Group referred to by this policy
* @param epersonGroup Group
*/
public void setGroup(Group epersonGroup)

View File

@@ -104,13 +104,22 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService
}
@Override
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
return resourcePolicyDAO.findByTypeIdGroupAction(c, dso, group, action, notPolicyID);
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action) throws SQLException {
return resourcePolicyDAO.findByTypeGroupAction(c, dso, group, action);
}
@Override
public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id) throws SQLException{
return resourcePolicyDAO.findByEPersonGroupTypeIdAction(c, e, groups, action, type_id);
}
@Override
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID)
throws SQLException
{
return resourcePolicyDAO.findByTypeGroupActionExceptId(context, dso, group, action, notPolicyID);
}
/**
* Delete a ResourcePolicy

View File

@@ -34,7 +34,16 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
public List<ResourcePolicy> findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException;
public List<ResourcePolicy> findByTypeIdGroupAction(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException;
/**
* Look up ResourcePolicies by DSpaceObject, Group and action, ignoring the policy with a specific ID.
* This method can be used to detect duplicate ResourcePolicies.
* @param notPolicyID the ResourcePolicy with this ID is ignored while searching for equal ResourcePolicies.
* @return List of resource policies for the same DSpaceObject, group and action but with a different policy ID.
* @throws SQLException
*/
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
public List<ResourcePolicy> findByEPersonGroupTypeIdAction(Context context, EPerson e, List<Group> groups, int action, int type_id) throws SQLException;
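A short hedged sketch of how the new findByTypeGroupActionExceptId lookup backs isAnIdenticalPolicyAlreadyInPlace(...); the wrapper class and wiring are illustrative only.

import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.eperson.Group;

public class DuplicatePolicyCheckSketch {
    // True if another policy with the same DSpaceObject, group and action already exists.
    public static boolean hasDuplicate(Context context, ResourcePolicyService resourcePolicyService,
                                       DSpaceObject dso, Group group, int action, ResourcePolicy policy)
            throws SQLException {
        List<ResourcePolicy> duplicates = resourcePolicyService.findByTypeGroupActionExceptId(
                context, dso, group, action, policy.getID());
        return !duplicates.isEmpty();
    }
}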

View File

@@ -75,7 +75,7 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
}
@Override
public List<ResourcePolicy> findByTypeIdGroupAction(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException {
Criteria criteria = createCriteria(context, ResourcePolicy.class);
criteria.add(Restrictions.and(
Restrictions.eq("dSpaceObject", dso),
@@ -83,15 +83,21 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
Restrictions.eq("actionId", action)
));
criteria.setMaxResults(1);
List<ResourcePolicy> results;
if (notPolicyID != -1)
{
criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", notPolicyID))));
}
return list(criteria);
}
@Override
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
Criteria criteria = createCriteria(context, ResourcePolicy.class);
criteria.add(Restrictions.and(
Restrictions.eq("dSpaceObject", dso),
Restrictions.eq("epersonGroup", group),
Restrictions.eq("actionId", action)
));
criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", notPolicyID))));
return list(criteria);
}
public List<ResourcePolicy> findByEPersonGroupTypeIdAction(Context context, EPerson e, List<Group> groups, int action, int type_id) throws SQLException
{
Criteria criteria = createCriteria(context, ResourcePolicy.class);

View File

@@ -167,11 +167,28 @@ public interface AuthorizeService {
*/
public boolean isAdmin(Context c, DSpaceObject o) throws SQLException;
/**
* Check to see if a specific user is an Administrator of a given object
* within DSpace. Always return {@code true} if the user is a System
* Admin
*
* @param c current context
* @param e the user to check
* @param o current DSpace Object, if <code>null</code> the call will be
* equivalent to a call to the <code>isAdmin(Context c)</code>
* method
* @return {@code true} if the user has administrative privileges on the
* given DSpace object
* @throws SQLException if database error
*/
public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException;
/**
* Check to see if the current user is a System Admin. Always return
* {@code true} if c.ignoreAuthorization is set. Anonymous users
* can't be Admins (EPerson set to NULL)
* {@code true} if c.ignoreAuthorization is set. If no EPerson is
* logged in and context.getCurrentUser() returns null, this method
* returns false as anonymous users can never be administrators.
*
* @param c current context
* @return {@code true} if user is an admin or ignore authorization
@@ -179,6 +196,17 @@ public interface AuthorizeService {
* @throws SQLException if database error
*/
public boolean isAdmin(Context c) throws SQLException;
/**
* Check to see if a specific user is a system admin. Always return
* {@code true} if c.ignoreAuthorization is set.
*
* @param c current context
* @param e the user to check; anonymous (null) users can never be admins
* @return {@code true} if user is an admin or ignore authorization
* flag set
* @throws SQLException if database error
*/
public boolean isAdmin(Context c, EPerson e) throws SQLException;
public boolean isCommunityAdmin(Context c) throws SQLException;
@@ -410,8 +438,8 @@ public interface AuthorizeService {
* @throws SQLException if there's a database problem
*/
public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject o, Group group, int actionID, int policyID) throws SQLException;
public ResourcePolicy findByTypeIdGroupAction(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException;
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException;
/**

View File

@@ -33,11 +33,21 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
public List<ResourcePolicy> find(Context c, DSpaceObject o, int actionId) throws SQLException;
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action) throws SQLException;
public List<ResourcePolicy> find(Context context, Group group) throws SQLException;
public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id) throws SQLException;
/**
* Look up ResourcePolicies by DSpaceObject, Group and action, ignoring the policy with a specific ID.
* This method can be used to detect duplicate ResourcePolicies.
* @param notPolicyID the ResourcePolicy with this ID is ignored while searching for equal ResourcePolicies.
* @return List of resource policies for the same DSpaceObject, group and action but with a different policy ID.
* @throws SQLException
*/
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID)
throws SQLException;
public String getActionText(ResourcePolicy resourcePolicy);

View File

@@ -393,4 +393,8 @@ public interface BrowseDAO
public boolean isEnableBrowseFrequencies();
public void setEnableBrowseFrequencies(boolean enableBrowseFrequencies);
public void setStartsWith(String startsWith);
public String getStartsWith();
}

View File

@@ -7,18 +7,19 @@
*/
package org.dspace.browse;
import java.sql.SQLException;
import java.util.List;
import java.util.ArrayList;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.sort.SortOption;
import org.dspace.sort.OrderFormat;
import org.dspace.sort.SortOption;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
* This class does most of the actual grunt work of preparing a browse
@@ -408,7 +409,7 @@ public class BrowseEngine
// get the table name that we are going to be getting our data from
// this is the distinct table constrained to either community or collection
dao.setTable(browseIndex.getDistinctTableName());
dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
// remind the DAO that this is a distinct value browse, so it knows what sort
// of query to build
dao.setDistinct(true);
@@ -463,15 +464,8 @@ public class BrowseEngine
String rawFocusValue = null;
if (offset < 1 && scope.hasJumpToValue() || scope.hasStartsWith())
{
String focusValue = getJumpToValue();
// store the value to tell the Browse Info object which value we are browsing on
rawFocusValue = focusValue;
// make sure the incoming value is normalised
focusValue = normalizeJumpToValue(focusValue);
offset = getOffsetForDistinctValue(focusValue);
rawFocusValue = getJumpToValue();
}

View File

@@ -7,10 +7,7 @@
*/
package org.dspace.browse;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
@@ -18,17 +15,17 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverFacetField;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.*;
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.DiscoverResult.FacetResult;
import org.dspace.discovery.DiscoverResult.SearchDocument;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.*;
/**
*
* @author Andrea Bollini (CILEA)
@@ -85,6 +82,8 @@ public class SolrBrowseDAO implements BrowseDAO
/** value to start browse from in focus field */
private String focusValue = null;
private String startsWith = null;
/** field to look for value in */
private String valueField = null;
@@ -152,9 +151,16 @@ public class SolrBrowseDAO implements BrowseDAO
addStatusFilter(query);
if (distinct)
{
DiscoverFacetField dff = new DiscoverFacetField(facetField,
DiscoverFacetField dff;
if (StringUtils.isNotBlank(startsWith)) {
dff = new DiscoverFacetField(facetField,
DiscoveryConfigurationParameters.TYPE_TEXT, -1,
DiscoveryConfigurationParameters.SORT.VALUE, startsWith);
} else {
dff = new DiscoverFacetField(facetField,
DiscoveryConfigurationParameters.TYPE_TEXT, -1,
DiscoveryConfigurationParameters.SORT.VALUE);
}
query.addFacetField(dff);
query.setFacetMinCount(1);
query.setMaxResults(0);
@@ -486,6 +492,16 @@ public class SolrBrowseDAO implements BrowseDAO
return focusValue;
}
@Override
public void setStartsWith(String startsWith) {
this.startsWith = startsWith;
}
@Override
public String getStartsWith() {
return startsWith;
}
/*
* (non-Javadoc)
*
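The startsWith support above is implemented by handing the prefix straight to the facet. A sketch of the query the DAO now builds when a "starts with" value is present; the facet field name is hypothetical (the real one comes from the browse index configuration).

import org.dspace.discovery.DiscoverFacetField;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;

public class StartsWithBrowseQuerySketch {
    // Build a distinct-value browse query limited to facet values starting with the given prefix.
    public static DiscoverQuery build(String facetField, String startsWith) {
        DiscoverQuery query = new DiscoverQuery();
        DiscoverFacetField dff = new DiscoverFacetField(facetField,
                DiscoveryConfigurationParameters.TYPE_TEXT, -1,
                DiscoveryConfigurationParameters.SORT.VALUE, startsWith);
        query.addFacetField(dff);
        query.setFacetMinCount(1); // only return values that actually occur
        query.setMaxResults(0);    // we only want facet values, not documents
        return query;
    }
}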

View File

@@ -7,11 +7,6 @@
*/
package org.dspace.checker;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;
import org.apache.commons.collections.MapUtils;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;
@@ -23,6 +18,11 @@ import org.dspace.core.Context;
import org.dspace.storage.bitstore.factory.StorageServiceFactory;
import org.dspace.storage.bitstore.service.BitstreamStorageService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;
/**
* <p>
* Main class for the checksum checker tool, which calculates checksums for each
@@ -127,6 +127,7 @@ public final class CheckerCommand
collector.collect(context, info);
}
context.uncacheEntity(bitstream);
bitstream = dispatcher.next();
}
}

View File

@@ -55,7 +55,7 @@ public class ChecksumHistory implements ReloadableEntity<Long>
private String checksumCalculated;
@ManyToOne
@JoinColumn(name = "result")
@JoinColumn(name = "result", referencedColumnName = "result_code")
private ChecksumResult checksumResult;

View File

@@ -7,20 +7,7 @@
*/
package org.dspace.checker;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;
import javax.mail.MessagingException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;
import org.dspace.checker.service.SimpleReporterService;
@@ -28,6 +15,14 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Email;
import javax.mail.MessagingException;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;
/**
* <p>
* The email reporter creates and sends emails to an administrator. This only
@@ -184,7 +179,7 @@ public class DailyReportEmailer
try
{
context = new Context();
context = new Context(Context.Mode.READ_ONLY);
// the number of bitstreams in report
int numBitstreams = 0;
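Opening the reporting context as Context.Mode.READ_ONLY lets the read-only authorization caching introduced elsewhere in this changeset kick in. A minimal hedged sketch of that pattern (the report body is omitted):

import org.dspace.core.Context;

public class ReadOnlyReportSketch {
    public static void main(String[] args) throws Exception {
        // Read-only context: nothing will be committed, and cached authorization
        // results can be reused for the whole run.
        Context context = new Context(Context.Mode.READ_ONLY);
        try {
            // ... gather checksum history and build the report here ...
        } finally {
            // Nothing to commit for a read-only run, so abort() is sufficient.
            context.abort();
        }
    }
}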

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.checker;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.dspace.content.Bitstream;
import javax.persistence.*;
@@ -57,7 +59,7 @@ public class MostRecentChecksum implements Serializable
private boolean bitstreamFound;
@OneToOne
@JoinColumn(name= "result")
@JoinColumn(name= "result", referencedColumnName = "result_code")
private ChecksumResult checksumResult;
/**
@@ -155,4 +157,44 @@ public class MostRecentChecksum implements Serializable
public void setBitstreamFound(boolean bitstreamFound) {
this.bitstreamFound = bitstreamFound;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MostRecentChecksum that = (MostRecentChecksum) o;
return new EqualsBuilder()
.append(toBeProcessed, that.toBeProcessed)
.append(matchedPrevChecksum, that.matchedPrevChecksum)
.append(infoFound, that.infoFound)
.append(bitstreamFound, that.bitstreamFound)
.append(bitstream, that.bitstream)
.append(expectedChecksum, that.expectedChecksum)
.append(currentChecksum, that.currentChecksum)
.append(processStartDate, that.processStartDate)
.append(processEndDate, that.processEndDate)
.append(checksumAlgorithm, that.checksumAlgorithm)
.append(checksumResult, that.checksumResult)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(bitstream)
.append(toBeProcessed)
.append(expectedChecksum)
.append(currentChecksum)
.append(processStartDate)
.append(processEndDate)
.append(checksumAlgorithm)
.append(matchedPrevChecksum)
.append(infoFound)
.append(bitstreamFound)
.append(checksumResult)
.toHashCode();
}
}
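The equals/hashCode pair added above follows the commons-lang builder pattern; distilled onto a throwaway value class (not a DSpace type) it looks like this:

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

public class ChecksumPairSketch {
    private final String expectedChecksum;
    private final String currentChecksum;

    public ChecksumPairSketch(String expectedChecksum, String currentChecksum) {
        this.expectedChecksum = expectedChecksum;
        this.currentChecksum = currentChecksum;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ChecksumPairSketch that = (ChecksumPairSketch) o;
        return new EqualsBuilder()
                .append(expectedChecksum, that.expectedChecksum)
                .append(currentChecksum, that.currentChecksum)
                .isEquals();
    }

    @Override
    public int hashCode() {
        // Same 17/37 seed pair as MostRecentChecksum; any two non-zero odd numbers work.
        return new HashCodeBuilder(17, 37)
                .append(expectedChecksum)
                .append(currentChecksum)
                .toHashCode();
    }
}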

View File

@@ -145,17 +145,14 @@ public final class ResultsPruner
throw new IllegalStateException("Problem parsing duration: "
+ e.getMessage(), e);
}
ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
if(code == null)
{
throw new IllegalStateException("Checksum result code not found: " + resultCode);
}
if ("default".equals(resultCode))
{
if ("default".equals(resultCode)) {
rp.setDefaultDuration(duration);
}
else
{
} else {
ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
if (code == null) {
throw new IllegalStateException("Checksum result code not found: " + resultCode);
}
rp.addInterested(code, duration);
}
}

View File

@@ -166,7 +166,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
*
* @return the MIME type
*/
public final String getMIMEType()
public String getMIMEType()
{
return mimetype;
}
@@ -177,7 +177,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
* @param s
* the new MIME type
*/
public final void setMIMEType(String s)
public void setMIMEType(String s)
{
this.mimetype = s;
}

View File

@@ -95,6 +95,37 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
return bitstreamDAO.findAll(context, Bitstream.class);
}
@Override
public Bitstream clone(Context context, Bitstream bitstream)
throws SQLException
{
// Create a new bitstream with a new ID.
Bitstream clonedBitstream = bitstreamDAO.create(context, new Bitstream());
// Set the internal identifier, file size, checksum, and
// checksum algorithm to the same values as the given bitstream.
clonedBitstream.setInternalId(bitstream.getInternalId());
clonedBitstream.setSizeBytes(bitstream.getSize());
clonedBitstream.setChecksum(bitstream.getChecksum());
clonedBitstream.setChecksumAlgorithm(bitstream.getChecksumAlgorithm());
try
{
//Update our bitstream but turn off the authorization system since permissions haven't been set at this point in time.
context.turnOffAuthorisationSystem();
update(context, clonedBitstream);
}
catch (AuthorizeException e)
{
log.error(e);
//Can never happen since we turn off authorization before we update
}
finally
{
context.restoreAuthSystemState();
}
return clonedBitstream;
}
@Override
public Bitstream create(Context context, InputStream is) throws IOException, SQLException {
// Store the bits
@@ -248,21 +279,21 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
context.addEvent(new Event(Event.DELETE, Constants.BITSTREAM, bitstream.getID(),
String.valueOf(bitstream.getSequenceID()), getIdentifiers(context, bitstream)));
//Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
bundle.getBitstreams().remove(bitstream);
}
// Remove policies
authorizeService.removeAllPolicies(context, bitstream);
// Remove bitstream itself
bitstream.setDeleted(true);
update(context, bitstream);
//Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
bundle.removeBitstream(bitstream);
}
//Remove all bundles from the bitstream object, clearing the connection in 2 ways
bundles.clear();
// Remove policies only after the bitstream has been updated (otherwise the current user does not have WRITE rights)
authorizeService.removeAllPolicies(context, bitstream);
}
@Override
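clone() above relies on temporarily disabling the authorization system for an internal update and restoring it afterwards. A hedged sketch of that try/finally pattern in isolation (the Runnable wrapper is illustrative, not a DSpace API):

import org.dspace.core.Context;

public class AuthSystemToggleSketch {
    // Suspend authorization for an internal update and always restore it, even on failure.
    public static void runWithoutAuthorization(Context context, Runnable internalUpdate) {
        try {
            context.turnOffAuthorisationSystem();
            internalUpdate.run();
        } finally {
            context.restoreAuthSystemState();
        }
    }
}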

View File

@@ -10,6 +10,7 @@ package org.dspace.content;
import java.sql.SQLException;
import java.util.*;
import org.apache.commons.collections.CollectionUtils;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BundleService;
import org.dspace.core.Constants;
@@ -130,18 +131,40 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport
}
/**
* Get the bitstreams in this bundle
* Get a copy of the bitstream list of this bundle
* Note that this is a copy; if you wish to manipulate the bitstream list, you should use
* {@link Bundle#addBitstream}, {@link Bundle#removeBitstream} or {@link Bundle#clearBitstreams}
*
* @return the bitstreams
*/
public List<Bitstream> getBitstreams() {
return bitstreams;
List<Bitstream> bitstreamList = new LinkedList<>(this.bitstreams);
return bitstreamList;
}
/**
* Add a new bitstream to this bundle.
* @param bitstream
*/
void addBitstream(Bitstream bitstream){
bitstreams.add(bitstream);
}
/**
* Clear the list of bitstreams of this bundle
*/
public void clearBitstreams() {
bitstreams.clear();
}
/**
* Remove the given bitstream from this bundle's bitstream list
* @param bitstream The bitstream to remove
*/
public void removeBitstream(Bitstream bitstream) {
bitstreams.remove(bitstream);
}
/**
* Get the items this bundle appears in
*
@@ -215,5 +238,4 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport
}
return bundleService;
}
}
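Because getBitstreams() now returns a copy, mutating the returned list no longer changes the bundle; changes have to go through the package-private mutators above or the service layer. A hedged sketch of the calling pattern (the exact BundleService signature is assumed from BundleServiceImpl below):

import java.util.List;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.service.BundleService;
import org.dspace.core.Context;

public class BundleCopySemanticsSketch {
    public static void demonstrate(Context context, BundleService bundleService,
                                   Bundle bundle, Bitstream bitstream) throws Exception {
        List<Bitstream> snapshot = bundle.getBitstreams(); // safe to iterate, sort or filter
        snapshot.remove(bitstream);                        // no effect on the bundle itself
        bundleService.removeBitstream(context, bundle, bitstream); // the real removal
    }
}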

View File

@@ -147,6 +147,14 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
return;
}
}
//Ensure that the owning item's last-modified date gets updated!
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);
}
bundle.addBitstream(bitstream);
bitstream.getBundles().add(bundle);
@@ -191,14 +199,18 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
bundle.unsetPrimaryBitstreamID();
}
// Check if our bitstream is part of a single bundle:
// If so delete it, if not then remove the link between bundle & bitstream
if(bitstream.getBundles().size() == 1)
// Check if our bitstream is part of a single or no bundle.
// Bitstream.getBundles() may be empty (the delete() method clears
// the bundles). We should not delete the bitstream, if it is used
// in another bundle, instead we just remove the link between bitstream
// and this bundle.
if(bitstream.getBundles().size() <= 1)
{
// We don't need to remove the link between bundle & bitstream, this will be handled in the delete() method.
// We don't need to remove the link between bundle & bitstream,
// this will be handled in the delete() method.
bitstreamService.delete(context, bitstream);
}else{
bundle.getBitstreams().remove(bitstream);
bundle.removeBitstream(bitstream);
bitstream.getBundles().remove(bundle);
}
}
@@ -269,29 +281,60 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws AuthorizeException, SQLException {
authorizeService.authorizeAction(context, bundle, Constants.WRITE);
bundle.getBitstreams().clear();
List<Bitstream> currentBitstreams = bundle.getBitstreams();
List<Bitstream> updatedBitstreams = new ArrayList<Bitstream>();
// Loop through and ensure these Bitstream IDs are all valid. Add them to list of updatedBitstreams.
for (int i = 0; i < bitstreamIds.length; i++) {
UUID bitstreamId = bitstreamIds[i];
Bitstream bitstream = bitstreamService.find(context, bitstreamId);
if(bitstream == null){
// If we have an invalid Bitstream ID, just ignore it, but log a warning
if(bitstream == null) {
//This should never occur but just in case
log.warn(LogManager.getHeader(context, "Invalid bitstream id while changing bitstream order", "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
continue;
}
bitstream.getBundles().remove(bundle);
bundle.getBitstreams().add(bitstream);
bitstream.getBundles().add(bundle);
bitstreamService.update(context, bitstream);
// If we have a Bitstream not in the current list, log a warning & exit immediately
if(!currentBitstreams.contains(bitstream))
{
log.warn(LogManager.getHeader(context, "Encountered a bitstream not in this bundle while changing bitstream order. Bitstream order will not be changed.", "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
return;
}
updatedBitstreams.add(bitstream);
}
//The order of the bitstreams has changed, ensure that we update the last modified of our item
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
// If our lists are different sizes, exit immediately
if(updatedBitstreams.size()!=currentBitstreams.size())
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);
log.warn(LogManager.getHeader(context, "Size of old list and new list do not match. Bitstream order will not be changed.", "Bundle: " + bundle.getID()));
return;
}
// As long as the order has changed, update it
if(CollectionUtils.isNotEmpty(updatedBitstreams) && !updatedBitstreams.equals(currentBitstreams))
{
//First clear out the existing list of bitstreams
bundle.clearBitstreams();
// Now add them back in the proper order
for (Bitstream bitstream : updatedBitstreams)
{
bitstream.getBundles().remove(bundle);
bundle.addBitstream(bitstream);
bitstream.getBundles().add(bundle);
bitstreamService.update(context, bitstream);
}
//The order of the bitstreams has changed, ensure that we update the last modified of our item
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);
}
}
}
@@ -399,16 +442,15 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
bundle.getName(), getIdentifiers(context, bundle)));
// Remove bitstreams
Iterator<Bitstream> bitstreams = bundle.getBitstreams().iterator();
while (bitstreams.hasNext()) {
Bitstream bitstream = bitstreams.next();
bitstreams.remove();
List<Bitstream> bitstreams = bundle.getBitstreams();
bundle.clearBitstreams();
for (Bitstream bitstream : bitstreams) {
removeBitstream(context, bundle, bitstream);
}
Iterator<Item> items = bundle.getItems().iterator();
while (items.hasNext()) {
Item item = items.next();
List<Item> items = new LinkedList<>(bundle.getItems());
bundle.getItems().clear();
for (Item item : items) {
item.removeBundle(bundle);
}

View File

@@ -7,16 +7,18 @@
*/
package org.dspace.content;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.*;
import org.dspace.authorize.AuthorizeException;
/**
* Class representing a collection.
@@ -34,6 +36,8 @@ import java.util.List;
*/
@Entity
@Table(name="collection")
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
public class Collection extends DSpaceObject implements DSpaceObjectLegacySupport
{
@@ -83,7 +87,7 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
joinColumns = {@JoinColumn(name = "collection_id") },
inverseJoinColumns = {@JoinColumn(name = "community_id") }
)
private final List<Community> communities = new ArrayList<>();
private Set<Community> communities = new HashSet<>();
@Transient
private transient CollectionService collectionService;
@@ -263,7 +267,11 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
*/
public List<Community> getCommunities() throws SQLException
{
return communities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = communities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addCommunity(Community community) {
@@ -271,7 +279,7 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
setModified();
}
void removeCommunity(Community community){
void removeCommunity(Community community) {
this.communities.remove(community);
setModified();
}
@@ -328,9 +336,10 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
return Constants.COLLECTION;
}
public void setWorkflowGroup(int step, Group g)
public void setWorkflowGroup(Context context, int step, Group g)
throws SQLException, AuthorizeException
{
getCollectionService().setWorkflowGroup(this, step, g);
getCollectionService().setWorkflowGroup(context, this, step, g);
}
@Override
@@ -345,4 +354,4 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
}
return collectionService;
}
}
}

View File

@@ -32,6 +32,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.*;
import org.dspace.authorize.service.ResourcePolicyService;
/**
* Service implementation for the Collection object.
@@ -51,6 +52,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
protected ResourcePolicyService resourcePolicyService;
@Autowired(required = true)
protected BitstreamService bitstreamService;
@Autowired(required = true)
protected ItemService itemService;
@@ -334,30 +337,77 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
groupService.setName(g,
"COLLECTION_" + collection.getID() + "_WORKFLOW_STEP_" + step);
groupService.update(context, g);
setWorkflowGroup(collection, step, g);
setWorkflowGroup(context, collection, step, g);
authorizeService.addPolicy(context, collection, Constants.ADD, g);
}
return getWorkflowGroup(collection, step);
}
@Override
public void setWorkflowGroup(Collection collection, int step, Group group) {
public void setWorkflowGroup(Context context, Collection collection, int step, Group group)
throws SQLException, AuthorizeException
{
// we need to store the old group to be able to revoke permissions if granted before
Group oldGroup = null;
int action;
switch (step)
{
case 1:
oldGroup = collection.getWorkflowStep1();
action = Constants.WORKFLOW_STEP_1;
collection.setWorkflowStep1(group);
break;
case 2:
oldGroup = collection.getWorkflowStep2();
action = Constants.WORKFLOW_STEP_2;
collection.setWorkflowStep2(group);
break;
case 3:
oldGroup = collection.getWorkflowStep3();
action = Constants.WORKFLOW_STEP_3;
collection.setWorkflowStep3(group);
break;
default:
throw new IllegalArgumentException("Illegal step count: " + step);
}
// deal with permissions.
try
{
context.turnOffAuthorisationSystem();
// remove the policies for the old group
if (oldGroup != null)
{
Iterator<ResourcePolicy> oldPolicies =
resourcePolicyService.find(context, collection, oldGroup, action).iterator();
while (oldPolicies.hasNext())
{
resourcePolicyService.delete(context, oldPolicies.next());
}
oldPolicies = resourcePolicyService.find(context, collection, oldGroup, Constants.ADD).iterator();
while (oldPolicies.hasNext())
{
ResourcePolicy rp = oldPolicies.next();
if (rp.getRpType() == ResourcePolicy.TYPE_WORKFLOW)
{
resourcePolicyService.delete(context, rp);
}
}
}
// group can be null to delete workflow step.
// we need to grant permissions if group is not null
if (group != null)
{
authorizeService.addPolicy(context, collection, action, group, ResourcePolicy.TYPE_WORKFLOW);
authorizeService.addPolicy(context, collection, Constants.ADD, group, ResourcePolicy.TYPE_WORKFLOW);
}
} finally {
context.restoreAuthSystemState();
}
collection.setModified();
}
@Override
@@ -749,8 +799,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
while (owningCommunities.hasNext())
{
Community owningCommunity = owningCommunities.next();
owningCommunities.remove();
owningCommunity.getCollections().remove(collection);
collection.removeCommunity(owningCommunity);
owningCommunity.removeCollection(collection);
}
collectionDAO.delete(context, collection);
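setWorkflowGroup now needs the Context because it swaps the WORKFLOW_STEP_* and ADD policies between the old and the new group. A hedged usage sketch, assuming the standard DSpace 6 service-factory lookup:

import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Context;
import org.dspace.eperson.Group;

public class WorkflowGroupSketch {
    public static void assignStep2Reviewers(Context context, Collection collection, Group reviewers)
            throws Exception {
        CollectionService collectionService =
                ContentServiceFactory.getInstance().getCollectionService();
        // Grants WORKFLOW_STEP_2 and ADD to the new group and revokes them from the old one.
        collectionService.setWorkflowGroup(context, collection, 2, reviewers);
        // Passing null instead of a group removes the workflow step (and its policies).
    }
}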

View File

@@ -9,10 +9,12 @@ package org.dspace.content;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.log4j.Logger;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
@@ -30,6 +32,8 @@ import java.util.*;
*/
@Entity
@Table(name="community")
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
{
/** log4j category */
@@ -44,13 +48,13 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
joinColumns = {@JoinColumn(name = "parent_comm_id") },
inverseJoinColumns = {@JoinColumn(name = "child_comm_id") }
)
private final List<Community> subCommunities = new ArrayList<>();
private Set<Community> subCommunities = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "subCommunities")
private List<Community> parentCommunities = new ArrayList<>();
private Set<Community> parentCommunities = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "communities", cascade = {CascadeType.PERSIST})
private final List<Collection> collections = new ArrayList<>();
private Set<Collection> collections = new HashSet<>();
@OneToOne
@JoinColumn(name = "admin")
@@ -85,13 +89,13 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
void addSubCommunity(Community subCommunity)
{
getSubcommunities().add(subCommunity);
subCommunities.add(subCommunity);
setModified();
}
void removeSubCommunity(Community subCommunity)
{
getSubcommunities().remove(subCommunity);
subCommunities.remove(subCommunity);
setModified();
}
@@ -140,17 +144,21 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Collection> getCollections()
{
return collections;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Collection[] output = collections.toArray(new Collection[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addCollection(Collection collection)
{
getCollections().add(collection);
collections.add(collection);
}
void removeCollection(Collection collection)
{
getCollections().remove(collection);
collections.remove(collection);
}
/**
@@ -162,7 +170,11 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Community> getSubcommunities()
{
return subCommunities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = subCommunities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
/**
@@ -173,16 +185,25 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Community> getParentCommunities()
{
return parentCommunities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = parentCommunities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addParentCommunity(Community parentCommunity) {
getParentCommunities().add(parentCommunity);
parentCommunities.add(parentCommunity);
}
void clearParentCommunities(){
this.parentCommunities.clear();
this.parentCommunities = null;
parentCommunities.clear();
}
public void removeParentCommunity(Community parentCommunity)
{
parentCommunities.remove(parentCommunity);
setModified();
}
/**

View File

@@ -452,12 +452,9 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
ArrayList<String> removedIdentifiers = getIdentifiers(context, childCommunity);
String removedHandle = childCommunity.getHandle();
UUID removedId = childCommunity.getID();
rawDelete(context, childCommunity);
childCommunity.getParentCommunities().remove(parentCommunity);
parentCommunity.removeSubCommunity(childCommunity);
log.info(LogManager.getHeader(context, "remove_subcommunity",
"parent_comm_id=" + parentCommunity.getID() + ",child_comm_id=" + childCommunity.getID()));
@@ -492,7 +489,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
Iterator<Community> subcommunities = community.getSubcommunities().iterator();
while (subcommunities.hasNext()) {
Community subCommunity = subcommunities.next();
subcommunities.remove();
community.removeSubCommunity(subCommunity);
delete(context, subCommunity);
}
// now let the parent remove the community
@@ -535,7 +532,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
while (collections.hasNext())
{
Collection collection = collections.next();
collections.remove();
community.removeCollection(collection);
removeCollection(context, community, collection);
}
// delete subcommunities
@@ -544,7 +541,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
while (subCommunities.hasNext())
{
Community subComm = subCommunities.next();
subCommunities.remove();
community.removeSubCommunity(subComm);
delete(context, subComm);
}
@@ -553,9 +550,16 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
// Remove any Handle
handleService.unbindHandle(context, community);
// Remove the parent-child relationship for the community we want to delete
Community parent = (Community) getParentObject(context, community);
if (parent != null) {
community.removeParentCommunity(parent);
parent.removeSubCommunity(community);
}
Group g = community.getAdministrators();
// Delete community row
communityDAO.delete(context, community);

View File

@@ -7,17 +7,18 @@
*/
package org.dspace.content;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.hibernate.annotations.Sort;
import org.hibernate.annotations.SortType;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.*;
/**
* Class representing an item in DSpace.
@@ -78,7 +79,7 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
joinColumns = {@JoinColumn(name = "item_id") },
inverseJoinColumns = {@JoinColumn(name = "collection_id") }
)
private final List<Collection> collections = new ArrayList<>();
private final Set<Collection> collections = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "items")
private final List<Bundle> bundles = new ArrayList<>();
@@ -224,23 +225,31 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
}
/**
* Get the collections this item is in. The order is indeterminate.
* Get the collections this item is in, sorted ascending by collection name.
*
* @return the collections this item is in, if any.
*/
public List<Collection> getCollections()
{
return collections;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Collection[] output = collections.toArray(new Collection[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addCollection(Collection collection)
{
getCollections().add(collection);
collections.add(collection);
}
void removeCollection(Collection collection)
{
getCollections().remove(collection);
collections.remove(collection);
}
public void clearCollections(){
collections.clear();
}
public Collection getTemplateItemOf() {
@@ -262,6 +271,31 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
return bundles;
}
/**
* Get the bundles matching a bundle name (name corresponds roughly to type)
*
* @param name
* name of bundle (ORIGINAL/TEXT/THUMBNAIL)
*
* @return the matching bundles, in no particular order
*/
public List<Bundle> getBundles(String name)
{
List<Bundle> matchingBundles = new ArrayList<Bundle>();
// now only keep bundles with matching names
List<Bundle> bunds = getBundles();
for (Bundle bundle : bunds)
{
if (name.equals(bundle.getName()))
{
matchingBundles.add(bundle);
}
}
return matchingBundles;
}
/**
* Add a bundle to the item, should not be made public since we don't want to skip business logic
* @param bundle the bundle to be added
@@ -289,35 +323,35 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
* @return <code>true</code> if object passed in represents the same item
* as this object
*/
@Override
public boolean equals(Object obj)
{
if (obj == null)
{
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (this.getClass() != objClass)
{
return false;
}
final Item otherItem = (Item) obj;
if (!this.getID().equals(otherItem.getID()))
{
return false;
}
@Override
public boolean equals(Object obj)
{
if (obj == null)
{
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (this.getClass() != objClass)
{
return false;
}
final Item otherItem = (Item) obj;
if (!this.getID().equals(otherItem.getID()))
{
return false;
}
return true;
}
return true;
}
@Override
public int hashCode()
{
int hash = 5;
hash += 71 * hash + getType();
hash += 71 * hash + getID().hashCode();
return hash;
}
@Override
public int hashCode()
{
int hash = 5;
hash += 71 * hash + getType();
hash += 71 * hash + getID().hashCode();
return hash;
}
/**
* return type found in Constants

View File

@@ -237,13 +237,25 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return itemDAO.findAllByCollection(context, collection);
}
@Override
public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException {
return itemDAO.findAllByCollection(context, collection, limit, offset);
}
@Override
public Iterator<Item> findInArchiveOrWithdrawnDiscoverableModifiedSince(Context context, Date since)
throws SQLException
{
return itemDAO.findAll(context, true, true, true, since);
}
@Override
public Iterator<Item> findInArchiveOrWithdrawnNonDiscoverableModifiedSince(Context context, Date since)
throws SQLException
{
return itemDAO.findAll(context, true, true, false, since);
}
@Override
public void updateLastModified(Context context, Item item) throws SQLException, AuthorizeException {
item.setLastModified(new Date());
@@ -651,7 +663,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
}
//Only clear collections after we have removed everything else from the item
- item.getCollections().clear();
+ item.clearCollections();
item.setOwningCollection(null);
// Finally remove item row
@@ -1189,6 +1201,11 @@ prevent the generation of resource policy entry values with null dspace_object a
return itemDAO.countItems(context, collection, true, false);
}
@Override
public int countAllItems(Context context, Collection collection) throws SQLException {
return itemDAO.countItems(context, collection, true, false) + itemDAO.countItems(context, collection, false, true);
}
@Override
public int countItems(Context context, Community community) throws SQLException {
// First we need a list of all collections under this community in the hierarchy
@@ -1197,6 +1214,15 @@ prevent the generation of resource policy entry values with null dspace_object a
// Now, let's count unique items across that list of collections
return itemDAO.countItems(context, collections, true, false);
}
@Override
public int countAllItems(Context context, Community community) throws SQLException {
// First we need a list of all collections under this community in the hierarchy
List<Collection> collections = communityService.getAllCollections(context, community);
// Now, let's count unique items across that list of collections
return itemDAO.countItems(context, collections, true, false) + itemDAO.countItems(context, collections, false, true);
}
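For clarity, the new countAllItems variants above differ from countItems only in adding the withdrawn count (countItems(..., false, true)) to the archived count (countItems(..., true, false)). A small sketch, with context and collection as assumed variables and itemService as the usual DSpace 6 service bean:
    // Illustrative fragment: archived-only vs archived + withdrawn totals.
    int visible = itemService.countItems(context, collection);        // archived only
    int everything = itemService.countAllItems(context, collection);  // archived + withdrawn
    int withdrawn = everything - visible;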
@Override
protected void getAuthoritiesAndConfidences(String fieldKey, Collection collection, List<String> values, List<String> authorities, List<Integer> confidences, int i) {

View File

@@ -21,6 +21,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -98,7 +99,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
}
@Override
- public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
+ public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
return metadataValueDAO.findByValueLike(context, value);
}
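The switch from List to Iterator above (mirrored in the DAO further down) avoids materialising every matching row at once. A sketch of the adjusted caller pattern, with metadataValueService, context and the search string as assumptions:
    // Illustrative fragment: stream over matches instead of holding them all in memory.
    Iterator<MetadataValue> matches = metadataValueService.findByValueLike(context, "climate");
    while (matches.hasNext())
    {
        MetadataValue value = matches.next();
        // inspect or update the value here
    }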

View File

@@ -12,7 +12,9 @@ import org.dspace.content.service.SiteService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.Table;
import javax.persistence.Transient;
@@ -22,6 +24,8 @@ import javax.persistence.Transient;
* By default, the handle suffix "0" represents the Site, e.g. "1721.1/0"
*/
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
@Table(name = "site")
public class Site extends DSpaceObject
{

View File

@@ -93,11 +93,8 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
Item item = itemService.create(context, workspaceItem);
item.setSubmitter(context.getCurrentUser());
- // Now create the policies for the submitter and workflow
- // users to modify item and contents
+ // Now create the policies for the submitter to modify item and contents
// contents = bitstreams, bundles
// FIXME: icky hardcoded workflow steps
workflowService.addInitialWorkspaceItemPolicies(context, workspaceItem);
// read permission
authorizeService.addPolicy(context, item, Constants.READ, item.getSubmitter(), ResourcePolicy.TYPE_SUBMISSION);
// write permission

View File

@@ -168,7 +168,11 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic
{
init();
log.debug("Getting matches for '" + text + "'");
- String xpathExpression = String.format(xpathTemplate, text.replaceAll("'", "&apos;").toLowerCase());
+ String xpathExpression = "";
+ String[] textHierarchy = text.split(hierarchyDelimiter, -1);
+ for (int i = 0; i < textHierarchy.length; i++) {
+ xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
+ }
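// Illustrative note (not in the original diff): assuming the plugin's usual
// hierarchy delimiter "::" (an assumption about its configuration), a query
// such as "north america::canada" is split into two levels, and the loop above
// appends one formatted xpathTemplate fragment per level, so nested vocabulary
// nodes are matched rather than only top-level terms.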
XPath xpath = XPathFactory.newInstance().newXPath();
Choice[] choices;
try {

View File

@@ -7,10 +7,6 @@
*/
package org.dspace.content.authority;
- import org.dspace.authority.AuthoritySearchService;
- import org.dspace.authority.AuthorityValue;
- import org.dspace.authority.factory.AuthorityServiceFactory;
- import org.dspace.authority.rest.RestSource;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
@@ -18,6 +14,10 @@ import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;
+ import org.dspace.authority.AuthoritySearchService;
+ import org.dspace.authority.AuthorityValue;
+ import org.dspace.authority.factory.AuthorityServiceFactory;
+ import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.Collection;
import org.dspace.core.ConfigurationManager;
@@ -38,7 +38,8 @@ import java.util.Map;
public class SolrAuthority implements ChoiceAuthority {
private static final Logger log = Logger.getLogger(SolrAuthority.class);
- protected RestSource source = DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("AuthoritySource", RestSource.class);
+ protected SolrAuthorityInterface source = DSpaceServicesFactory.getInstance().getServiceManager()
+ .getServiceByName("AuthoritySource", SolrAuthorityInterface.class);
protected boolean externalResults = false;
protected final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.comparator;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.DSpaceObject;
import java.util.Comparator;
public class NameAscendingComparator implements Comparator<DSpaceObject>{
@Override
public int compare(DSpaceObject dso1, DSpaceObject dso2) {
if (dso1 == dso2){
return 0;
}else if (dso1 == null){
return -1;
}else if (dso2 == null){
return 1;
}else {
String name1 = StringUtils.trimToEmpty(dso1.getName());
String name2 = StringUtils.trimToEmpty(dso2.getName());
// When two DSOs have the same name, fall back to their UUIDs to impose a stable order
if(name1.equals(name2)) {
return ObjectUtils.compare(dso1.getID(), dso2.getID());
} else {
return name1.compareToIgnoreCase(name2);
}
}
}
}
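A small usage sketch for the new comparator above; community1 and community2 are assumed variables, and any list of communities, collections or items works the same way.
    // Illustrative fragment: sort DSpaceObjects alphabetically by name,
    // falling back to UUID order for equal names (as the comparator does).
    List<Community> communities = new ArrayList<>(Arrays.asList(community1, community2));
    Collections.sort(communities, new NameAscendingComparator());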

View File

@@ -32,6 +32,8 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.jdom.Attribute;
import org.jdom.Document;
import org.jdom.Element;
@@ -89,6 +91,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
protected final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
/**
* Fill in the plugin alias table from DSpace configuration entries
@@ -455,7 +458,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
{
List<MockMetadataValue> metadata = new ArrayList<>();
String identifier_uri = "http://hdl.handle.net/"
String identifier_uri = handleService.getCanonicalPrefix()
+ site.getHandle();
String title = site.getName();
String url = site.getURL();
@@ -493,7 +496,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
String description = communityService.getMetadata(community, "introductory_text");
String description_abstract = communityService.getMetadata(community, "short_description");
String description_table = communityService.getMetadata(community,"side_bar_text");
String identifier_uri = "http://hdl.handle.net/"
String identifier_uri = handleService.getCanonicalPrefix()
+ community.getHandle();
String rights = communityService.getMetadata(community,"copyright_text");
String title = communityService.getMetadata(community,"name");
@@ -543,7 +546,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
String description = collectionService.getMetadata(collection, "introductory_text");
String description_abstract = collectionService.getMetadata(collection, "short_description");
String description_table = collectionService.getMetadata(collection, "side_bar_text");
String identifier_uri = "http://hdl.handle.net/"
String identifier_uri = handleService.getCanonicalPrefix()
+ collection.getHandle();
String provenance = collectionService.getMetadata(collection, "provenance_description");
String rights = collectionService.getMetadata(collection, "copyright_text");

View File

@@ -179,7 +179,7 @@ public class XSLTIngestionCrosswalk
}
try
{
- JDOMSource source = new JDOMSource(new Document((Element)root.cloneContent()));
+ JDOMSource source = new JDOMSource(new Document((Element)root.clone()));
JDOMResult result = new JDOMResult();
xform.transform(source, result);
Document dimDoc = result.getDocument();

View File

@@ -57,6 +57,8 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item>
public Iterator<Item> findAllByCollection(Context context, Collection collection) throws SQLException;
public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException;
/**
* Count number of items in a given collection
* @param context context

View File

@@ -13,6 +13,7 @@ import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -26,7 +27,7 @@ public interface MetadataValueDAO extends GenericDAO<MetadataValue> {
public List<MetadataValue> findByField(Context context, MetadataField fieldId) throws SQLException;
- public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
+ public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public void deleteByMetadataField(Context context, MetadataField metadataField) throws SQLException;

View File

@@ -121,6 +121,8 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
Restrictions.eq("resourcePolicy.eperson", ePerson),
actionQuery
));
criteria.setCacheable(true);
return list(criteria);
}
@@ -160,6 +162,8 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
query.append(" AND rp.epersonGroup.id IN (select g.id from Group g where (from EPerson e where e.id = :eperson_id) in elements(epeople))");
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter("eperson_id", ePerson.getID());
hibernateQuery.setCacheable(true);
return list(hibernateQuery);

View File

@@ -91,6 +91,7 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
Query query = createQuery(context, queryBuilder.toString());
query.setParameter(sortField.toString(), sortField.getID());
query.setCacheable(true);
return findMany(context, query);
}
@@ -129,6 +130,8 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
Restrictions.eq("resourcePolicy.eperson", ePerson),
actionQuery
));
criteria.setCacheable(true);
return list(criteria);
}
@@ -164,6 +167,8 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
query.append(" AND rp.epersonGroup.id IN (select g.id from Group g where (from EPerson e where e.id = :eperson_id) in elements(epeople))");
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter("eperson_id", ePerson.getID());
hibernateQuery.setCacheable(true);
return list(hibernateQuery);
}
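// Illustrative note (not in the original diff): setCacheable(true) only has an
// effect when Hibernate's query cache is enabled alongside the second-level
// cache, typically via configuration properties such as
//   hibernate.cache.use_second_level_cache = true
//   hibernate.cache.use_query_cache = true
// Where exactly DSpace 6 sets these is an assumption; without them the calls
// above are harmless no-ops.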

View File

@@ -104,7 +104,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
addMetadataLeftJoin(query, Item.class.getSimpleName().toLowerCase(), Collections.singletonList(metadataField));
query.append(" WHERE item.inArchive = :in_archive");
query.append(" AND item.submitter =:submitter");
- addMetadataSortQuery(query, Collections.singletonList(metadataField), null);
+ //submissions should sort in reverse by date by default
+ addMetadataSortQuery(query, Collections.singletonList(metadataField), null, Collections.singletonList("desc"));
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter(metadataField.toString(), metadataField.getID());
@@ -232,6 +233,24 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
return iterate(query);
}
@Override
public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) throws SQLException {
Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c");
query.setParameter("collection", collection);
if(offset != null)
{
query.setFirstResult(offset);
}
if(limit != null)
{
query.setMaxResults(limit);
}
return iterate(query);
}
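A hedged sketch of paging with the new limit/offset variant above; pageSize, context and collection are assumed, and callers normally reach this through ItemService (which gains the same signature in this diff) rather than the DAO directly.
    // Illustrative fragment: walk a large collection one page at a time.
    int pageSize = 100;
    for (int offset = 0; ; offset += pageSize)
    {
        Iterator<Item> page = itemService.findAllByCollection(context, collection, pageSize, offset);
        int seen = 0;
        while (page.hasNext())
        {
            Item item = page.next();
            seen++;
            // process item
        }
        if (seen < pageSize)
        {
            break;   // last page reached
        }
    }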
@Override
public int countItems(Context context, Collection collection, boolean includeArchived, boolean includeWithdrawn) throws SQLException {
Query query = createQuery(context, "select count(i) from Item i join i.collections c WHERE :collection IN c AND i.inArchive=:in_archive AND i.withdrawn=:withdrawn");

View File

@@ -18,6 +18,7 @@ import org.hibernate.Query;
import org.hibernate.criterion.Restrictions;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -48,14 +49,14 @@ public class MetadataValueDAOImpl extends AbstractHibernateDAO<MetadataValue> im
}
@Override
- public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
- Criteria criteria = createCriteria(context, MetadataValue.class);
- criteria.add(
- Restrictions.like("value", "%" + value + "%")
- );
- criteria.setFetchMode("metadataField", FetchMode.JOIN);
- return list(criteria);
+ public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
+ String queryString = "SELECT m FROM MetadataValue m JOIN m.metadataField f " +
+ "WHERE m.value like concat('%', concat(:searchString,'%')) ORDER BY m.id ASC";
+ Query query = createQuery(context, queryString);
+ query.setString("searchString", value);
+ return iterate(query);
}
@Override

View File

@@ -32,6 +32,7 @@ public class SiteDAOImpl extends AbstractHibernateDAO<Site> implements SiteDAO
@Override
public Site findSite(Context context) throws SQLException {
Criteria criteria = createCriteria(context, Site.class);
criteria.setCacheable(true);
return uniqueResult(criteria);
}
}

View File

@@ -27,6 +27,22 @@ public interface BitstreamService extends DSpaceObjectService<Bitstream>, DSpace
public List<Bitstream> findAll(Context context) throws SQLException;
/**
* Clone the given bitstream by first creating a new bitstream with a new ID,
* then copying the internal identifier, file size, checksum, and
* checksum algorithm from the given bitstream.
* This allows multiple bitstreams to share the same stored asset via a common
* internal identifier; a typical use case is versioning.
*
* @param context
* DSpace context object
* @param bitstream
* Bitstream to be cloned
* @return the clone
* @throws SQLException if database error
*/
public Bitstream clone(Context context, Bitstream bitstream) throws SQLException;
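A hedged sketch of the clone operation declared above in a versioning-style flow; the bitstreamService, context and original variables are assumptions, and bundle wiring and authorization are elided.
    // Illustrative fragment: create a new Bitstream row that points at the same
    // stored asset as the original (no bytes are copied).
    Bitstream copy = bitstreamService.clone(context, original);
    // copy now shares the original's internal identifier, size and checksum;
    // attach it to a bundle or version as needed, then persist:
    bitstreamService.update(context, copy);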
/**
* Create a new bitstream, with a new ID. The checksum and file size are
* calculated. No authorization checks are made in this method.
@@ -62,7 +78,7 @@ public interface BitstreamService extends DSpaceObjectService<Bitstream>, DSpace
* @throws AuthorizeException if authorization error
*/
public Bitstream create(Context context, Bundle bundle, InputStream is) throws IOException, SQLException, AuthorizeException;
/**
* Register a new bitstream, with a new ID. The checksum and file size
* are calculated. The newly created bitstream has the "unknown"

View File

@@ -161,7 +161,8 @@ public interface CollectionService extends DSpaceObjectService<Collection>, DSpa
* @param group
* the new workflow group, or <code>null</code>
*/
- public void setWorkflowGroup(Collection collection, int step, Group group);
+ public void setWorkflowGroup(Context context, Collection collection, int step, Group group)
+ throws SQLException, AuthorizeException;
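A short sketch of a caller adapting to the widened signature above, which now takes a Context and can raise SQLException or AuthorizeException; collectionService, collection, reviewers and the step number are assumptions, with the step following the usual 1 to 3 workflow-step convention.
    // Illustrative fragment: old call site
    // collectionService.setWorkflowGroup(collection, 1, reviewers);
    // becomes, with the new signature:
    collectionService.setWorkflowGroup(context, collection, 1, reviewers);
    collectionService.update(context, collection);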
/**
* Get the workflow group corresponding to a particular workflow step.

Some files were not shown because too many files have changed in this diff.