Compare commits

...

491 Commits

Author SHA1 Message Date
Tim Donohue
d1795b598a [maven-release-plugin] prepare release dspace-5.11 2022-07-28 11:04:16 -05:00
Kim Shepherd
6f75bb084a [DS-4453] Discovery autocomplete HTML escaping (JSPUI) 2022-07-27 09:12:22 +12:00
Kim Shepherd
d1dd7d2332 [DS-4132] JSPUI resumable upload dir validation 2022-07-26 16:33:34 +12:00
Kim Shepherd
c89e493e51 [DS-4453] Escape spellcheck, autocomplete HTML (JSPUI) 2022-07-26 16:33:25 +12:00
Kim Shepherd
28eb815821 [DS-4383] Request Item Servlet escape HTML 2022-07-26 16:33:17 +12:00
Kim Shepherd
56e7604918 [DS-4131] Better path handling in ItemImport zips 2022-07-26 16:18:05 +12:00
Tim Donohue
73cdff26fd Merge pull request #8420 from tdonohue/prep_for_5.11
[Prep for 5.11] Fix DSpace 5.x branch to make it releasable again.  Add Docker Compose for easier testing.
2022-07-25 12:14:38 -05:00
Tim Donohue
aee9386468 Fix missing license header errors and warnings 2022-07-25 11:37:39 -05:00
Tim Donohue
6b1a118f39 Bug fixes to Docker CLI containers. Ensure scripts run properly. 2022-07-25 11:37:39 -05:00
Tim Donohue
26f534e9ca Ensure bin scripts ALWAYS have correct line endings 2022-07-22 10:32:35 -05:00
Tim Donohue
ff0809a0cd Migrate docker-compose scripts for 5.x from https://github.com/DSpace-Labs/DSpace-Docker-Images/ . Some refactoring needed. 2022-07-22 10:32:35 -05:00
Tim Donohue
3f130fa308 Ensure scripts keep their necessary line endings 2022-07-22 10:32:35 -05:00
Tim Donohue
0428da8dab Download Ruby gems via different manner 2022-07-22 10:32:35 -05:00
Tim Donohue
059d6f6b17 Switch to new Sonatype settings for maven releases 2022-07-22 10:32:35 -05:00
Tim Donohue
349773a55c Update DuraSpace text/links to LYRASIS. Minor updates to POM to link to GitHub Issues and Google Groups 2022-07-20 14:19:16 -05:00
Tim Donohue
6220bcd9a4 Update LICENSE and NOTICE based on latest main (resync with main) 2022-07-19 17:12:16 -05:00
Kim Shepherd
5f72424a47 [DS-4133] Improve URL handling in Controlled Vocab JSPUI servlet 2022-01-14 13:12:46 +13:00
Hrafn Malmquist
3b0cdee734 Merge pull request #1701 from atmire/DS-2852
DS-2852 - Discovery label fix for authority display value
2021-07-19 23:54:37 +01:00
Hrafn Malmquist
6f781f2a2c Merge pull request #3274 from IordanisKostelidis/dspace-5_x
fixes restlet's blocked repository - dspace 5.x
2021-07-17 23:35:30 +01:00
Iordanis Kostelidis
3145c6abbf fixes DSpace#3247 2021-07-17 15:59:15 +03:00
Tim Donohue
d0434eb4ff Update build.yml to not limit by branch
Cherry-pick of #3072
2020-11-30 10:22:59 -06:00
Tim Donohue
09021ebe4a Merge pull request #3065 from tdonohue/github_ci_5x
Add CI to GitHub via Actions (based on Travis CI config) for 5.x
2020-11-25 15:05:29 -06:00
Tim Donohue
1d635c889d Remove .travis.yml 2020-11-25 13:26:39 -06:00
Tim Donohue
79246e9bb5 Change to only run for dspace-5_x branch 2020-11-25 13:26:05 -06:00
Tim Donohue
1ae611154a Initial GitHub CI based on Travis CI & backported from 6.x 2020-11-25 13:11:15 -06:00
Tim Donohue
690b614c25 Merge pull request #2245 from tdonohue/remove_unused_oai_dep
Remove unused jackson dependencies from DSpace 5.x OAI
2020-07-17 09:37:48 -05:00
kshepherd
c38e2527f0 Merge pull request #2605 from atmire/w2p-67450_Discovery-clean-index-fix
[DS-4393] Discovery clean index fix DSpace 5
2020-03-29 12:02:41 +13:00
kshepherd
0a2cb81e8f Merge pull request #2566 from 4Science/DS-3444
[DS-3444] JSPUI must keep shibboleth attributes on session renewal
2020-03-29 11:56:55 +13:00
Tim Donohue
f93f371b21 Update to LICENSE and NOTICE per LYRASIS merger 2020-03-27 12:19:41 -05:00
kshepherd
b71b453a2d Merge pull request #2565 from AndreaJenisSaroni/DS-4377
DS-4377 Fix Sherpa RoMEO layout
2020-02-19 23:53:54 +13:00
kshepherd
f478e5bfd5 Merge pull request #2679 from kshepherd/DS-3791_date_facets_5x
[DS-3791] XMLUI facet "yearDifference" fix (5.x port)
2020-02-19 00:49:36 +13:00
Jonas Van Goolen
d028ae3044 DS-3791 Make sure the "yearDifference" takes into account that a gap of 10 year contains 11 years 2020-02-19 00:39:56 +13:00
Kristof De Langhe
a8198257fb 67450: Discovery clean index fix 2019-11-27 11:33:54 +01:00
Pascal-Nicolas Becker
153b7f9dcf [DS-3444] JSPUI must keep shibboleth attributes on session renewal 2019-11-05 10:08:16 +01:00
Andrea Jenis Saroni
fc8fa661a0 DS-4377 Fix Sherpa RoMEO layout 2019-11-04 18:02:28 +01:00
Luigi Andrea Pascarelli
f2dfef10ce Merge pull request #2517 from atmire/DS-4342-5x
[DS-4342] improve the performance of the collections/collection_id/items REST endpoint
2019-10-28 18:42:41 +01:00
Terry Brady
5faf9adca7 Merge pull request #2518 from kshepherd/DS-4144_update_node_npm_mirage2_5x
[DS-4144] Update node and npm versions for mirage 2 (5.x)
2019-09-23 13:46:57 -07:00
kshepherd
ed014de4ca Merge pull request #2514 from 4Science/dspace-5_x
DS-4340 Duplicate Headers when bitstream has a comma in the title
2019-09-22 11:15:40 +12:00
Kim Shepherd
31600ffd3d [DS-4144] Update node and npm versions for mirage 2 dependencies 2019-09-17 14:28:42 +12:00
Philip Vissenaekens
c6c3299869 DS-4342 2019-09-16 11:36:26 +02:00
Andrea Bollini
fbe6dd6fbc DS-4340 Duplicate Headers when bitstream has a comma in the title (Chrome) - JSPUI Only 2019-09-09 09:38:28 +02:00
kshepherd
1aaf345fbb Merge pull request #2361 from Georgetown-University-Libraries/ds4167r5
[DS-4167] 5x Port: Migrate update-sequences.sql to `database` command
2019-09-07 11:59:34 +12:00
Terry Brady
e6ff477c6f Merge pull request #2512 from J4bbi/ds4336v5
[DS-4336] Point Ant to archived, stable URL
2019-09-06 12:26:10 -07:00
j4bbi
127bf2a43b Point Ant to archived, stable URL 2019-09-06 19:42:48 +01:00
Tim Donohue
c418e6f0a5 Merge pull request #2506 from Georgetown-University-Libraries/ds4336r5
[DS-4336] Update ant version in Docker Build (5x)
2019-09-06 18:06:08 +02:00
Terry Brady
12f4736808 update ant version 2019-09-05 10:51:23 -07:00
Tim Donohue
57bae82553 Merge pull request #2482 from mwoodiupui/2477-5_x
Travis CI: Continue to use Ubuntu Trusty 14.04
2019-08-06 23:16:24 +02:00
Mark H. Wood
75693d95ae Tell Travis to boot Trusty Tahr, not Xenial Xerus, so we get Java 8 not 11. 2019-08-06 16:29:35 -04:00
Bram Luyten
f7f61fd6c4 Merge pull request #1691 from samuelcambien/dspace-5_DS-3545
DS-3545 mirage2: custom sitemap.xmap is ignored
2019-07-05 14:39:33 +02:00
Terry Brady
ad94cd7196 fix seq sql 2019-03-09 17:36:45 -08:00
Terry Brady
7465ef67c5 fix typos 2019-03-09 17:03:16 -08:00
Terry Brady
961b958326 Initial port of PR 2348 2019-03-08 16:11:12 -08:00
Terry Brady
ef01ed688b Merge pull request #2345 from terrywbrady/ds4126d5
[DS-4126] 5x: Optimize docker builds
2019-02-20 13:16:46 -08:00
Terry Brady
8b56447d40 update comment 2019-02-07 07:42:07 -08:00
Terry Brady
d46ff561f2 comment typo 2019-02-07 07:41:16 -08:00
Terry Brady
46c7c2936c Update Dockerfile.jdk7-test 2019-02-06 16:18:50 -08:00
Terry Brady
1de03d0eea Update Dockerfile.jdk7 2019-02-06 16:18:16 -08:00
Terry Brady
d0f2a5744e resolve build clean issues 2019-02-06 15:21:50 -08:00
Mark H. Wood
36c5a94154 Merge pull request #2271 from AndrewBennet/dspace-5_x
[DS-4085] Update abdera-client dependency to 1.1.3
2019-02-06 16:05:53 -05:00
Terry Brady
ebfc93cf14 port pr2307 2019-02-06 12:18:23 -08:00
Terry Brady
25f0ff10d4 Merge pull request #2326 from J4bbi/ds4142_5x
[DS-4142] dspace-5x: update maven jdks for docker
2019-01-14 13:50:43 -08:00
kshepherd
caca40e45c Merge pull request #2330 from Georgetown-University-Libraries/dsMir2a
[DS-4115] Update bower version to fix Mirage2 build
2019-01-15 09:25:43 +13:00
Terry Brady
8399e8c768 Match bower version for 6x 2019-01-14 11:22:23 -08:00
Hrafn Malmquist
98c989210a update maven jdks for docker 2019-01-14 17:17:55 +00:00
kshepherd
99bfe8cf02 Merge pull request #2295 from DSpace/DS-4104-Avoid-crosswalk-of-incorrect-dates-for-google-scholar
DS-4104 Avoid crosswalking wrong dates for GS
2019-01-13 14:36:30 +13:00
kshepherd
c745b89fea Merge pull request #2316 from Georgetown-University-Libraries/ds4115
[DS-4115] Update SASS and Ruby ver
2019-01-04 10:23:27 +13:00
Terry Brady
3345c6b9a0 Note ruby ver in documentation. 2019-01-03 12:46:59 -08:00
Terry Brady
2d7aa1fbb4 Update SASS and Ruby ver 2019-01-03 11:12:41 -08:00
Bram Luyten
e8abd073fc DS-4104 Avoid crosswalking wrong dates for GS 2018-12-13 08:24:56 +01:00
Andrew Bennet
2f2a10e95e Update abdera-client dependency to 1.1.3 2018-11-22 16:12:08 +00:00
Tim Donohue
7c2e039c7b Remove unused, old dependencies 2018-10-19 21:48:12 +00:00
Tim Donohue
837b4f7882 Merge pull request #2233 from mwoodiupui/DS-4031
[DS-4031] Updated link to DRIVER guidelines.
2018-10-18 11:42:48 -05:00
Terry Brady
bc7707624a Merge pull request #2217 from terrywbrady/ds4012_5x
[DS-4012] port to 5x for Jdk7 and 8
2018-10-17 11:49:08 -07:00
Terry Brady
ca35e6086b pr comments 2018-10-10 08:34:55 -07:00
Mark H. Wood
8b7c2a872f [DS-4031] Updated link to DRIVER guidelines. 2018-10-05 14:15:54 -04:00
Terry Brady
3b40088c04 [maven-release-plugin] prepare for next development iteration 2018-10-04 13:48:07 -04:00
Terry Brady
eca7968be7 [maven-release-plugin] prepare release dspace-5.10 2018-10-04 13:47:58 -04:00
Terry Brady
d0d4708a08 Merge pull request #2230 from Georgetown-University-Libraries/license-5.10
5.10 Update License
2018-10-04 08:57:26 -07:00
Terry Brady
0a4c7c54dd Collapse additional entries 2018-10-04 08:42:03 -07:00
Terry Brady
9730d69c24 pull license merge rules from prod 2018-10-04 08:30:16 -07:00
Terry Brady
e2e6e17c23 add license 2018-10-04 08:02:43 -07:00
Tim Donohue
e1e7bf13ff Merge pull request #2221 from Georgetown-University-Libraries/ds-2948_5x
[DS-2948] Port to 5x
2018-10-03 10:11:51 -05:00
Terry Brady
52342f039d port from 6x, hide noise from snippets 2018-10-01 13:55:36 -07:00
Terry Brady
e06e7c99da tomcat ver sel 2018-10-01 10:29:58 -07:00
Terry Brady
f2d11ba1d1 Merge pull request #2220 from Georgetown-University-Libraries/ds4020
[DS-4020] fix statistics logging issue in DSpace 5.9
2018-10-01 08:29:27 -07:00
Terry Brady
0bfebb8366 fix config ref 2018-09-28 08:13:59 -07:00
Terry Brady
776e8976b6 comment update 2018-09-27 23:04:39 -07:00
Terry Brady
23eb3d4ef9 port to 5x for Jdk7 and 8 2018-09-27 16:35:37 -07:00
Tim Donohue
3017df42ba Merge pull request #2204 from AlexanderS/DS-3664-5_x
[DS-3664] ImageMagick: Only execute "identify" on first page
2018-09-17 09:41:56 -05:00
Alexander Sulfrian
4e8c7b578b ImageMagick: Only execute "identify" on first page
The Info object used to get the color format runs "identify" on the supplied
input file. If the file has many pages, this process might require some time.
"identify" supports the same syntax for the input file like the other
ImageMagick tools and we can simply restrict the pages by changing the input
file name.

This fixes DS-3664.
2018-09-17 15:05:28 +02:00
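
Editor's note: the DS-3664 fix above relies on ImageMagick's page-selection syntax. A minimal sketch of the idea with im4java (the file path is hypothetical, and ImageMagick must be installed for it to run): appending "[0]" to the input name makes "identify" read only the first page.

```java
import org.im4java.core.Info;
import org.im4java.core.InfoException;

public class FirstPageIdentify {
    public static void main(String[] args) throws InfoException {
        // Hypothetical input file; "[0]" is ImageMagick's page-selection
        // syntax, so "identify" only reads the first page of the PDF.
        String firstPageOnly = "/tmp/sample.pdf[0]";

        // "true" asks im4java for the basic attribute set only.
        Info info = new Info(firstPageOnly, true);

        System.out.println("Format: " + info.getImageFormat());
        System.out.println("Size:   " + info.getImageWidth() + "x" + info.getImageHeight());
    }
}
```
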
Terry Brady
aa2895a49e Merge pull request #2197 from atmire/DS-4000
DS-4000: REST API (v5.9) does not run
2018-09-04 08:17:29 -07:00
Philip Vissenaekens
556eaf826f DS-4000 2018-09-04 09:59:54 +02:00
Mark H. Wood
59d6fb7dcc Merge pull request #2104 from jmarton/DS-3938_5x
[DS-3938] Use JDK7-compatible PostgreSQL JDBC driver in DSpace 5.x
2018-08-22 15:57:43 -04:00
Tim Donohue
f355da6494 Merge pull request #2136 from terrywbradyC9/dockerDspace5x
[DS-3967] 5x - Migrate Dockerfile to DSpace/DSpace
2018-08-21 15:10:00 -05:00
Terry Brady
655b17a55c enable Mirage2 by default 2018-08-02 15:20:15 -07:00
Terry Brady
31d5e621b8 docker usage comment 2018-08-01 19:03:49 -07:00
Terry Brady
04ec8af4e5 move docker res 2018-08-01 16:02:52 -07:00
Terry Brady
841ba85cbb add usage link 2018-07-25 17:56:41 -07:00
Terry Brady
c3c339141a Migrate 5x Dockerfile 2018-07-25 17:16:45 -07:00
Jozsef Marton
c9a9337b1b [DS-3938] Use JDK7-compatible PostgreSQL JDBC driver 2018-06-29 15:53:20 +02:00
Tim Donohue
75ac9a882e [maven-release-plugin] prepare for next development iteration 2018-06-25 21:13:50 +00:00
Tim Donohue
be88702e4e [maven-release-plugin] prepare release dspace-5.9 2018-06-25 21:13:36 +00:00
Tim Donohue
b5e0777575 Merge pull request #2100 from kshepherd/DS-3936_bower_registry_needs_updated_5.x
DS-3936 bower registry needs updating (5.x port)
2018-06-25 13:48:50 -05:00
Tim Donohue
2b8a177c71 Merge pull request #2018 from the-library-code/DS-3886
DS-3886: Adopt tests to versioning and DOI-support
2018-06-25 10:40:31 -05:00
Kim Shepherd
ce6212bcfc fix typo in json bowerrc 2018-06-25 09:13:05 +00:00
Kim Shepherd
83f74f93a0 update bower registry to https://registry.bower.io as per official instructions 2018-06-25 09:12:57 +00:00
kshepherd
f002a3e885 Merge pull request #2062 from tdonohue/DS-3447-ORCID-v2-5.x-port
DS-3447: ORCID v2 integration (port to 5.x from PR#2039)
2018-06-25 12:01:13 +12:00
kshepherd
8b849559bc Merge pull request #2094 from tdonohue/DS-950-5.x
DS-950: OAI should respect metadata.hidden... config properties.
2018-06-23 13:41:24 +12:00
kshepherd
d00880481a Merge pull request #2020 from tdonohue/DS-3883-port-to-5.x
DS-3883: Speed up Item summary lists by being smarter about when we load Thumbnails/Bitstreams (port to 5.x)
2018-06-22 12:06:20 +12:00
Terry Brady
2a70460983 Merge pull request #1789 from helix84/DS-3245-dspace-5_x
DS-3245: CSV linebreaks not supported by Bulkedit - DSpace 5 backport
2018-06-21 18:04:50 -06:00
kshepherd
2bf85d9e8d Merge pull request #1968 from mwoodiupui/DS-3853
[DS-3202] PMH Responder returns 500 Internal Error if item is in no Community or no Collection
2018-06-22 10:21:07 +12:00
Tim Donohue
8ebdc626db DS-950: OAI should respect metadata.hidden... config properties. 2018-06-20 19:57:22 +00:00
Tim Donohue
f7509e95f0 Merge pull request #2092 from tdonohue/DS-3866-5.x
fixes for DS-3866 (for 5.x branch)
2018-06-19 15:51:48 -05:00
Tim Donohue
ac796e51ba Merge pull request #2090 from tdonohue/DS-3840-5.x
Fixes for DS-3480 (for 5.x branch)
2018-06-19 15:44:49 -05:00
Kim Shepherd
df2a6a59e5 fixes for DS-3866 2018-06-18 21:42:42 +00:00
Kim Shepherd
1008277c02 Fixes for DS-3480 2018-06-18 21:22:22 +00:00
Tim Donohue
1d91a47d23 DS-3447: ORCID v2 integration (port to 5.x from PR#2039) 2018-05-15 16:27:51 -05:00
Tim Donohue
b8e289ae73 Merge pull request #1855 from helix84/DS-3705-reference-fix-pagination-5_x
DS-3705 Recent Submissions in Reference theme completely covered up by navigation (5.x)
2018-05-02 10:45:04 -05:00
kshepherd
3fe47c95a2 Merge pull request #2011 from mwoodiupui/DS-3832-v5
[DS-3832] GeoIP-API (com.maxmind.geoip:geoip-api) needs to be replaced by GeoIP2 (com.maxmind.geoip2:geoip2)
2018-04-19 09:51:04 +12:00
Mark H. Wood
41ed0511ec [DS-3832] Fix cherry-pick mention of class renamed in v6. 2018-04-18 16:53:39 -04:00
Mark H. Wood
e0f8da6671 [DS-3832] Don't spew stack traces for simple exceptions. 2018-04-18 16:38:16 -04:00
kshepherd
0a89ba8834 Merge pull request #1806 from atmire/DS-3560
DS-3560 MathJax CDN provider change
2018-04-15 12:09:47 +12:00
Tim Donohue
8d81e825de DS-3883: If only including thumbnails, only load the main item thumbnail. 2018-04-12 16:15:44 +00:00
Tim Donohue
a0ea20bd18 DS-3883: Don't loop through original bitstreams if only displaying thumbnails 2018-04-12 15:37:13 +00:00
Pascal-Nicolas Becker
a392058032 DS-3886: Adopt tests to versioning and DOI-support 2018-04-12 13:59:17 +02:00
Tim Donohue
31a19e7084 Merge pull request #2005 from Georgetown-University-Libraries/ds3835r5
[DS-3835] Port PR 1951 to 5x
2018-04-02 10:10:54 -05:00
Mark H. Wood
4d2cde0bfb [DS-3832] DSpace 5 has fewer mocks, more classes using GeoIP. 2018-03-30 10:03:46 -04:00
Mark H. Wood
17b2d0a67e [DS-3832] Clean up more references to v1 database. 2018-03-30 09:29:11 -04:00
Mark H. Wood
9fc0ce0df7 [DS-3832] Fetch and use GeoLite v2 City database. 2018-03-30 09:27:42 -04:00
Mark H. Wood
a16168edbe [DS-3832] Resolve dependency convergence problems. 2018-03-30 09:24:04 -04:00
Mark H. Wood
b869a242d3 [DS-3832] Fix ElasticSearch too. 2018-03-30 09:17:41 -04:00
Mark H. Wood
246df7b265 [DS-3832] Recast test support classes. 2018-03-30 09:11:22 -04:00
Mark H. Wood
78e68f6cb7 [DS-3832] Upgrade to GeoIP2. 2018-03-30 09:10:49 -04:00
Terry W Brady
c01a12f3d8 port PR 1951 to 5x 2018-03-28 15:00:03 -07:00
Tim Donohue
88e7b322c0 Merge pull request #1970 from tdonohue/latest_postgres_jdbc_5x
DS-3854: Update to latest PostgreSQL JDBC driver
2018-03-21 13:49:14 -05:00
Tim Donohue
682b4b0043 Update to latest PostgreSQL JDBC driver 2018-02-26 20:37:07 +00:00
Tim Donohue
418fd92a4c Merge pull request #1965 from mwoodiupui/DS-3852
[DS-3852] OAI indexer message not helpful in locating problems
2018-02-26 09:31:01 -06:00
Mark H. Wood
4e30af2c0f [DS-3853] Detect null Collection and skip on. 2018-02-24 18:39:10 -05:00
Mark H. Wood
07b050cf7d [DS-3852] Give more information about the item just indexed, to help identify it in case of problems. 2018-02-24 16:52:33 -05:00
kshepherd
483d23ae82 Merge pull request #1962 from hardyoyo/DS-3839-revised-5_x
[DS-3839] moved the autoorient IM op to the top of the operations list
2018-02-20 16:05:09 +13:00
Hardy Pottinger
2d6fceed53 [DS-3839] moved the autoorient IM op to the top of the operations list, where it belongs 2018-02-19 17:45:03 -06:00
Tim Donohue
4a5649174f Merge pull request #1958 from hardyoyo/DS-3839-support-autoorient-for-imagemagick-thumbnails-dspace_5x
[DS-3839] backporting DSPR#1956 for dspace-5_x
2018-02-16 09:31:30 -06:00
Hardy Pottinger
75e100d97e [DS-3839] backporting DSPR#1956 for dspace-5_x 2018-02-16 08:15:11 -06:00
Hardy Pottinger
577f3e31fe [DS-3757] increase default clamav socket timeout to 6 minutes (#1886) 2017-11-27 10:27:40 -06:00
Lotte Hofstede
ff1f01224f DS-3560: update deprecated MathJax url 2017-11-21 11:05:14 +01:00
Ivan Masár
52ec272ee6 DS-3705 Recent Submissions in Reference theme completely covered up by navigation 2017-10-04 10:52:30 +02:00
Terry Brady
890b04689f [maven-release-plugin] prepare for next development iteration 2017-09-01 12:36:10 -04:00
Terry Brady
50eca14e9f [maven-release-plugin] prepare release dspace-5.8 2017-09-01 12:25:57 -04:00
Hardy Pottinger
51b74510b9 [DS-3674] copied over input-forms.xml to the test config folder 2017-08-15 14:46:12 -05:00
Terry Brady
f96185dcea Merge pull request #1817 from Georgetown-University-Libraries/ds3661
[DS-3661] ImageMagick PDF Processing Degraded with Changes in 5.7 release
2017-08-09 11:04:58 -07:00
Terry W Brady
98ac9ed3ce bypass color profile check if not configured 2017-08-07 12:22:45 -07:00
Tim Donohue
50ac3b6819 Pin versions of SASS and Compass that Travis uses 2017-07-13 16:59:57 +00:00
Tim Donohue
ec8e839ef5 [maven-release-plugin] prepare for next development iteration 2017-07-12 19:44:29 +00:00
Tim Donohue
3832acc63e [maven-release-plugin] prepare release dspace-5.7 2017-07-12 19:44:19 +00:00
Tim Donohue
12f978ecee Rollback LNI versions manually 2017-07-12 18:32:56 +00:00
Tim Donohue
9981dfcacd [maven-release-plugin] rollback the release of dspace-5.7 2017-07-12 18:21:52 +00:00
Tim Donohue
ec5056750f [maven-release-plugin] prepare for next development iteration 2017-07-12 18:03:12 +00:00
Tim Donohue
266d016653 [maven-release-plugin] prepare release dspace-5.7 2017-07-12 18:03:01 +00:00
Mark H. Wood
4f0e0aec5e Fix doc comments to evade error on finding 'Javascript' in them 2017-07-12 17:24:01 +00:00
Mark H. Wood
f673b8da37 Update copyright claim years for release 2017-07-12 17:23:20 +00:00
Tim Donohue
e55212c14f DS-3431 : Fix broken tests by adding missing H2 migration 2017-07-11 15:44:18 +00:00
Tim Donohue
9401d971f6 DS-3431 : Fix broken tests by removing nullifying of global eperson 2017-07-11 14:37:46 +00:00
Mark H. Wood
090b617c28 [DS-3431] Harden DSpace's BasicWorfklowService 2017-07-11 14:37:08 +00:00
Tim Donohue
504e2ae270 Merge pull request #1724 from atmire/DS-2359-5x
DS-2359 Error when depositing large files via browser (over 2Gb)
2017-07-08 05:58:05 +10:00
Tim Donohue
99683cb810 Merge pull request #1781 from atmire/DS-3595-5x
DS-3595: Language change causes "page not found" error in several forms on XMLUI
2017-07-06 07:22:34 +10:00
Tim Donohue
dab9bd40ed Merge pull request #1733 from samuelcambien/dspace-5-DS-3584
DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address
2017-07-06 07:08:01 +10:00
Tim Donohue
7bbeea2633 Merge pull request #1794 from atmire/DS-3563_Missing-index-metadatavalue-resource-type-id
DS-3563: Fix Oracle Flyway migration error
2017-07-06 01:33:45 +10:00
Tom Desair
0182392563 DS-3563: Fix Oracle Flyway migration error 2017-07-04 16:51:05 +02:00
Àlex Magaz Graça
1c3673b37d DS-3245: CSV linebreaks not supported by Bulkedit
When a multiline field contained empty lines, the importer stopped
reading the file. This reverts a change from 53d387fed so that reading
stops when the end of the file has been reached instead.

Fixes https://jira.duraspace.org/browse/DS-3245
2017-06-30 10:11:43 +02:00
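
Editor's note: a minimal, generic sketch of the reading behaviour DS-3245 describes (not the actual Bulkedit code): stop at end of file rather than at the first empty line, so blank lines inside multiline CSV fields do not truncate the import.

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

public class ReadUntilEof {
    // Reads every line, including empty ones, and only stops at end of file.
    static List<String> readAllLines(BufferedReader reader) throws IOException {
        List<String> lines = new ArrayList<>();
        String line;
        while ((line = reader.readLine()) != null) {   // null only at EOF
            lines.add(line);                           // "" is a valid line, keep reading
        }
        return lines;
    }

    public static void main(String[] args) throws IOException {
        String csv = "id,dc.description[en]\n1,\"first line\n\nthird line\"\n";
        System.out.println(readAllLines(new BufferedReader(new StringReader(csv))));
    }
}
```
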
Lotte Hofstede
94faee2fd4 40648: fix for filters 2017-06-27 10:47:06 +02:00
Mark H. Wood
40d5f113a9 Merge pull request #1697 from tomdesair/DS-2748-5x_Improve-cocoon-page-not-found-page
DS-2748: Do not throw an exception in the PageNotFoundTransformer
2017-06-22 08:50:36 -04:00
Philip Vissenaekens
97b22916f4 DS-3595 2017-06-21 17:42:52 +02:00
Lotte Hofstede
98c38d38e1 Merge branch 'dspace-5_x' into DS-2852
Conflicts:
	dspace-xmlui-mirage2/src/main/webapp/templates/discovery_simple_filters.hbs
2017-06-14 11:53:07 +02:00
Pascal-Nicolas Becker
711b4e8a96 Merge pull request #1669 from alanorth/DS-3517
DS-3517 Allow improved handling of CMYK PDFs
2017-06-07 22:45:39 +02:00
kshepherd
778f9dfec0 Merge pull request #1700 from atmire/DS-3551-DB-connections-reduction
DS-3551 db connections reduction
2017-06-02 21:13:50 +12:00
Tim Donohue
15046de363 Merge pull request #1743 from arvoConsultores/DS-3281
DS-3281 - Submissions made through REST API (POST/item method) use workflow
2017-05-31 13:52:30 -07:00
kshepherd
ed8b31721f Merge pull request #1706 from atmire/DS-3563_Missing-index-metadatavalue-resource-type-id
Ds 3563 missing index metadatavalue resource type id
2017-05-30 13:22:11 +12:00
aroman
9edd2cd218 DS-3281 - Submissions made through REST API (POST/item method) don't
get into workflow-approvals
2017-05-09 08:24:41 +02:00
Lotte Hofstede
4d28fa42cc 40648: DS-2852 - Nullpointer prevention 2017-05-04 11:44:49 +02:00
Jonas Van Goolen
a7bed3a293 DS-3551 Additional required mocking of method 2017-05-04 09:03:54 +02:00
Jonas Van Goolen
d917b3158d DS-3551 "ContextAwareDisseminationCrosswalk" + Javadoc expectations of DisseminationCrosswalk interface 2017-04-28 11:21:44 +02:00
Jonas Van Goolen
aef0f52a5b DS-3551 Javadoc update + Remove additional "null" param for getColumnNames method 2017-04-28 11:21:44 +02:00
Tom Desair
09713ea4a8 DS-3551 Added some deprecation warnings 2017-04-28 11:21:44 +02:00
Jonas Van Goolen
54f5cd87fc DS-3551: Reuse database connection when requesting item page or bitstream 2017-04-28 11:21:43 +02:00
Jonas Van Goolen
c8f62e6f49 DS-3583 Usage of correct Collection Array (#1731)
* DS-3583 Usage of correct Collection Array

* DS-3583 Reverting of import rearrangement
2017-04-26 12:16:25 -07:00
Tim Donohue
bdf665b07e Merge pull request #1738 from cjuergen/DS-3585-5_x
Fix for DS-3585
2017-04-26 11:08:05 -07:00
cjuergen
8a790dedd3 Fix for DS-3585 2017-04-26 14:44:11 +02:00
samuel
23aa21ae86 DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address 2017-04-26 11:37:22 +02:00
Tom Desair
9f531fb244 DS-3563: Conditional create index for Oracle 2017-04-24 11:05:53 +02:00
Mark H. Wood
5a1943cf22 Merge pull request #1718 from mwoodiupui/DS-3505-5x
[DS-3505] Bad redirection from logout action (ported to dspace-5_x)

We've seen several successful tests (see the Jira issue).  Merging.
2017-04-20 08:49:54 -04:00
Philip Vissenaekens
2a627d8bbd DS-2359 2017-04-20 14:19:46 +02:00
Terry Brady
632a55d894 Merge pull request #1709 from alanorth/DS-3516
DS-3516 ImageMagick PDF Thumbnail class should only process PDFs
2017-04-19 14:37:30 -07:00
Mark H. Wood
b434b999b7 [DS-3505] Bad redirection from logout action (ported to dspace-5_x) 2017-04-19 15:15:35 -04:00
Tim Donohue
3963d3929e Merge pull request #1712 from atmire/DS-3573-Filtername-in-XMLUI-Discovery-filter-labels
DS-3573: Filtername in XMLUI Discovery filter labels
2017-04-19 09:19:43 -07:00
Alan Orth
f81cdf5283 DS-3516 ImageMagick PDF Thumbnail class should only process PDFs
Input formats for ImageMagick mediafilter plugins should be defined
in dspace.cfg. Currently they implement SelfRegisteredInputFormats,
which causes the PDF filter to attempt to process other formats in
the ORIGINAL bitstream bundle.
2017-04-14 14:29:09 +03:00
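
Editor's note: as a rough illustration of the configuration-driven approach DS-3516 describes, the sketch below reads a filter's allowed input formats through DSpace's ConfigurationManager. The property key follows the usual filter.<class>.inputFormats convention in dspace.cfg, but the exact key shown here is an assumption, and the code expects to run inside a DSpace installation where dspace.cfg can be located.

```java
import org.dspace.core.ConfigurationManager;

public class FilterInputFormatsExample {
    public static void main(String[] args) {
        // Assumed key, following the dspace.cfg convention
        //   filter.<fully.qualified.FilterClass>.inputFormats = <format names>
        String key = "filter.org.dspace.app.mediafilter.ImageMagickPdfThumbnailFilter.inputFormats";

        // Returns the configured value, or null if the property is not set.
        String formats = ConfigurationManager.getProperty(key);
        System.out.println(key + " = " + formats);
    }
}
```
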
Yana De Pauw
dd7502f758 DS-3573: Filtername in XMLUI Discovery filter labels 2017-04-14 13:03:01 +02:00
Tom Desair
d557c019f2 DS-3563 Added missing index on metadatavalue.resource_type_id 2017-04-11 16:41:05 +02:00
Lotte Hofstede
e010c2acff 40648: DS-2852 - Discovery label fix for authority display value 2017-04-06 16:10:11 +02:00
Tom Desair
7467741624 DS-2748: Do not throw an exception in the PageNotFoundTransformer but do return a 404 error code 2017-04-05 15:50:22 +02:00
Alan Orth
91a00e237c DS-3517 Allow improved handling of CMYK PDFs
Allow ImageMagick to generate thumbnails with more accurate colors
for PDFs using the CMYK color system. This adds two options to the
dspace.cfg where the user can optionally specify paths to CMYK and
RGB color profiles if they are available on their system (they are
provided by Ghostscript 9.x).

Uses im4java's Info class to determine the color system being used
by the PDF.

See: http://im4java.sourceforge.net/docs/dev-guide.html
2017-04-01 13:11:44 +03:00
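
Editor's note: a hedged sketch of the approach DS-3517 describes, using im4java directly. The property names and file paths below are assumptions for illustration only; the commit states only that two optional dspace.cfg settings point at CMYK and RGB ICC profiles (shipped with Ghostscript 9.x).

```java
import org.dspace.core.ConfigurationManager;
import org.im4java.core.ConvertCmd;
import org.im4java.core.IMOperation;

public class CmykAwareThumbnailSketch {
    public static void main(String[] args) throws Exception {
        // Assumed property names; see the note above.
        String cmykProfile = ConfigurationManager.getProperty("imagemagick.cmyk_profile");
        String rgbProfile  = ConfigurationManager.getProperty("imagemagick.srgb_profile");

        IMOperation op = new IMOperation();
        op.addImage("/tmp/sample.pdf[0]");          // hypothetical input, first page only
        if (cmykProfile != null && rgbProfile != null) {
            op.profile(cmykProfile);                // interpret source colors as CMYK
            op.profile(rgbProfile);                 // then convert to RGB for display
        }
        op.thumbnail(300);
        op.addImage("/tmp/sample-thumb.jpg");       // hypothetical output path

        new ConvertCmd().run(op);                   // requires ImageMagick on the host
    }
}
```
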
samuel
274f41258c DS-3545 mirage2: custom sitemap.xmap is ignored
Conflicts:
	dspace/modules/xmlui-mirage2/pom.xml
2017-03-28 12:57:55 +02:00
Peter Dietz
f45252547d DS-3366 Fix handleresolver by removing out.close (#1560) 2017-03-08 12:24:10 -06:00
Tim Donohue
d2c123d8c9 Merge pull request #1528 from aschweer/DS-3336-sort-movedropdown-5_x
[DS-3336] Properly sort collections in move item drop-down
2017-03-08 11:46:30 -06:00
Tim Donohue
943619248a DS-3520 fix. Upgrade Commons Collections to 3.2.2 2017-03-06 15:14:36 +00:00
Bram Luyten
848aea9b27 Merge pull request #1667 from jonas-atmire/DS-3518-PasswordAuthentication-bugfix
DS-3518 5_X port of JIRA Ticket DS-2941
2017-03-04 08:08:39 +01:00
Jonas Van Goolen
b8e784f8c2 DS-3518 5_X port of JIRA Ticket DS-2941 2017-03-03 10:20:59 +01:00
Tim Donohue
0b9d05154c Workaround for travis-ci/travis-ci#4629 2017-02-27 21:44:57 +00:00
Tim Donohue
5a81ba0f3b Merge pull request #1616 from jonas-atmire/DS-3448-MultiSelect-in-Submission_5_x
DS-3448 Multi-select in submission for workflow and workspace items
2017-02-22 14:46:39 -06:00
Tim Donohue
63ab1f13f8 Merge pull request #1592 from samuelcambien/dspace-5-3425
DS-3425 outputstream gets closed in JSONDiscoverySearcher
2017-02-21 15:33:58 -06:00
Tim Donohue
19d8144faa Merge pull request #1591 from samuelcambien/dspace-5-3415
DS-3415 - administrative.js doEditCommunity wrong parameter name
2017-02-21 15:00:00 -06:00
Bram Luyten
7d53df0d6b DS-2840 changing log level to DEBUG sidebar facet
Changes INFO level sidebar facet transformer log entries to DEBUG
2017-02-18 14:21:31 +01:00
Luigi Andrea Pascarelli
2bd6c2e392 DS-3356 add turnoff authz system (#1552) 2017-02-15 16:13:36 -06:00
Terry Brady
b0e624d72c Port 6x changes by Tom D to 5x, make static method (#1645) 2017-02-09 11:26:27 -06:00
Tim Donohue
b37bd18c51 Merge pull request #1643 from Georgetown-University-Libraries/ds3458-5xA
[DS-3458]5x Allow Shard Process to Append to an existing repo
2017-02-09 08:41:59 -06:00
Terry Brady
51bb72da2a Attempt to force travis to rebuild 2017-02-08 14:31:19 -08:00
Terry W Brady
3c8ecb5d1f Re-apply ping fix 2017-02-08 11:24:28 -08:00
Tim Donohue
59302e9d6f Merge pull request #1640 from cjuergen/DS-3479-5_x
Fix for DS-3479 avoid adding empty metadata values during import
2017-02-08 10:16:41 -06:00
Terry Brady
567ec083c8 [DS-3456] 5x Clarify command line options for statistics import/export tools (#1623)
* Clarify command line options

* Allow overwrite flag for import and export

* Document -o behavior for overwrite option

* Allow export file overwrite during re-index

* Fix DS-3464 solr-reindex-statistics

* Allow safe import/export of multivalue fields

* force another commit

* Use UTC year to prevent Jan 01 stats creating prior year shard

* Remove overloading of the -o option

* Limit import files for -i statistics

* Add more context to warning messages

* whitespace
2017-02-08 09:45:09 -06:00
cjuergen
329f3b48a6 Fix for DS-3479 avoid adding empty metadata values during import 2017-02-07 11:58:41 +01:00
Tim Donohue
a52779c571 Merge pull request #1628 from Georgetown-University-Libraries/ds3468-5x
[DS-3468] 5x Ignore bin directory created by Eclipse
2017-02-01 14:40:19 -06:00
Terry Brady
9a0334da7f Ensure only top level exclusion of bin 2017-01-26 07:11:59 -08:00
Terry W Brady
8e4db1344e Ignore bin directory created by Eclipse 2017-01-25 11:26:32 -08:00
Jonas Van Goolen
a9b8cca20f DS-3448 Removal of unnecessary duplicate javascript file 2017-01-13 09:53:38 +01:00
Jonas Van Goolen
93f368ff6b DS-3448 Multi-select in submission for workflow and workspace items 2017-01-12 14:02:17 +01:00
Tim Donohue
fbc023019c Merge pull request #1609 from bram-atmire/DS-3289-5_x
DS-3289 Removing duplicate slashes
2017-01-09 09:16:48 -06:00
Bram Luyten
e00dc3d421 DS-3289 Removing duplicate slashes 2017-01-07 19:17:27 +01:00
Bram Luyten
77a4da32ec Merge pull request #1606 from 4Science/DS-3441-5x
DS-3441 READ permission on the Collection object not respected by the JSPUI (5_x)
2017-01-06 18:19:00 +01:00
Andrea Bollini
79014ed943 DS-3441 READ permission on the Collection object not respected by the JSPUI 2017-01-06 14:11:02 +01:00
samuel
0dbaa81b54 DS-3425 outputstream gets closed in JSONDiscoverySearcher 2016-12-21 13:26:07 +01:00
samuel
c36e6f9f02 DS-3415 - administrative.js doEditCommunity wrong parameter name 2016-12-21 13:25:40 +01:00
Ivan Masár
f7b6c83e99 DS-3363 CSV import error says "row", means "column" 2016-11-14 18:28:35 +01:00
Luigi Andrea Pascarelli
2510609f68 [maven-release-plugin] prepare for next development iteration 2016-09-29 19:09:38 +02:00
Luigi Andrea Pascarelli
03724151be [maven-release-plugin] prepare release dspace-5.6 2016-09-29 19:09:29 +02:00
Luigi Andrea Pascarelli
52db795b72 Merge pull request #1541 from 4Science/DS-3347
DS-3347 manage empty configuration string for request a copy
2016-09-29 16:04:50 +02:00
Luigi Andrea Pascarelli
5f3f552078 DS-3347 manage empty configuration string for request a copy 2016-09-29 15:53:34 +02:00
Luigi Andrea Pascarelli
39f4db91da Merge pull request #1538 from 4Science/DS-3346-for-5
DS-3346 change deprecated setIgnoreAuthorization in favour of turnOff…
2016-09-28 23:23:15 +02:00
Luigi Andrea Pascarelli
04ba49ba56 DS-3346 change deprecated setIgnoreAuthorization in favour of turnOff/restore 2016-09-28 22:58:13 +02:00
Luigi Andrea Pascarelli
1aa92f8d00 Merge pull request #1531 from 4Science/DS-2623-porting
DS-2623 backport set description in the upload step files
2016-09-28 22:17:47 +02:00
Luigi Andrea Pascarelli
85f2195396 Reintroduce deprecated method referenced by dspace-lni for backward compatibility (related to DS-2604) 2016-09-26 19:44:44 +02:00
Luigi Andrea Pascarelli
c5cdedb0c6 Merge pull request #1522 from lap82/DS-2604-porting-cc_5
DS-2604 port - DSpace 5.x - from XMLUI to JSPUI the approach to queries Creative Commons service (via REST API)
2016-09-26 18:49:07 +02:00
Luigi Andrea Pascarelli
b805aaf1dd DS-2604 add some comment 2016-09-26 18:42:01 +02:00
Luigi Andrea Pascarelli
da315a4911 Merge pull request #1533 from 4Science/DS-3340
DS-3340 fix test
2016-09-26 17:45:15 +02:00
Luigi Andrea Pascarelli
ea4e3ee857 DS-3340 fix test 2016-09-26 17:29:55 +02:00
Luigi Andrea Pascarelli
1c4089c6b2 DS-2623 backport that fix the set description in the upload file and add the possibility to setdescription on multiple files 2016-09-26 14:58:27 +02:00
Andrea Schweer
e8a06006ae [DS-3336] Properly sort collections in move item drop-down 2016-09-26 11:58:49 +13:00
Luigi Andrea Pascarelli
9e0208fa96 DS-2604 manage double check for select_change and no_license option, fix no need to reach CC WS if select_change or no_license option selected 2016-09-23 00:14:41 +02:00
Luigi Andrea Pascarelli
76d6dec743 DS-2604 revert change to i18n key, manage selected option for select_change and no_license options 2016-09-23 00:13:26 +02:00
Luigi Andrea Pascarelli
427ba190a6 DS-2604 port from XMLUI to JSPUI the approach to reach the Creative Commons service (via REST API) 2016-09-22 15:28:15 +02:00
Luigi Andrea Pascarelli
bdd4eb20dc Merge pull request #1521 from lap82/DS-3248-porting-to-5
DS-3248 backport patch to enable expand parameter
2016-09-21 17:04:43 +02:00
Luigi Andrea Pascarelli
c7cbd44330 DS-3248 backport patch to enable expand parameter 2016-09-20 16:12:29 +02:00
Luigi Andrea Pascarelli
50a4f046d4 Merge pull request #1512 from 4Science/pdfbox-1.8.12
bump up to latest minor pdfbox version
2016-09-13 12:24:16 +02:00
Andrea Bollini
b5330b7815 bump up to latest minor pdfbox version 2016-09-13 11:29:31 +02:00
Mark H. Wood
4fed285c83 [DS-3097] Bitstreams of embargoed and/or withdrawn items can be accessed by anyone 2016-09-01 12:03:57 -04:00
Tim Donohue
9390016397 Merge pull request #1497 from wilee53/DS-3294-new
Fix DS-3294
2016-08-25 12:23:36 -05:00
Tim Donohue
b3c7f0a7f1 Merge pull request #1498 from abollini/DS-2895_5-x
Ds 2895 5 x
2016-08-24 10:44:46 -05:00
Bill Tantzen
8da8431869 remove unnecessary comments. 2016-08-22 09:41:05 -05:00
Andrea Bollini
2549e643f9 DS-2895 fix issue with the canEdit method
add unit test for the new isInProgressSubmission method
move the updateLastModified call after the actual change of the policies to avoid authorization exception on item and make the code more consistent
2016-08-20 11:19:25 +02:00
Andrea Bollini
ac0721767b DS-2895 add unit test to expose the wrong behaviour 2016-08-20 11:13:07 +02:00
Bill Tantzen
679c971ec3 replaced call to Boolean.getBoolean() (always returns false) with Boolean.valueOf() 2016-08-19 13:10:16 -05:00
Tim Donohue
b50d35d3f3 Merge pull request #1465 from tdonohue/DS-3266-and-DS-3140
Backporting of DS-3266 and DS-3140 for 5.x. Fix AIP Restore logic and add Integration Tests
2016-08-17 12:38:08 -05:00
Bruno Nocera Zanette
d6412e9af3 Fix: bitstream retrieval response without filename
This adds the bitstream's filename to the retrieval response, and it fixes the bug that all bitstreams are downloaded using the same filename ("retrieve").
2016-08-15 14:59:00 +02:00
helix84
067c1b1a95 Merge pull request #1229 from cjuergen/DS-2968
DS-2968 5.x EPerson selection list sorting
2016-08-11 16:35:58 +02:00
oooriii
20026af124 DS-3206 Policy form merge field values when perform group search 2016-08-11 10:54:28 +02:00
Tim Donohue
b3f9ea0eaa AIP Integration Test stability improvements. Make sure tests use separate temp directories for AIPs, better NPE handling for AIP parsing. 2016-08-09 13:39:40 -05:00
Tim Donohue
987a16d23f Backport to 5.x: Fix ITDSpaceAIP to no longer re-use AIPs between tests. Ensure all tests are standalone. 2016-08-09 11:32:03 -05:00
Tim Donohue
43d44aa0cc Backport to 5.x: Fix AIP restoration of item with no policies attached. Add Integration Test to prove. 2016-08-09 11:20:54 -05:00
Tim Donohue
307d577b35 Backporting of DS-3266 and DS-3140 for 5.x. Also backports the integration tests to validate code is working 2016-07-18 13:01:50 -05:00
Hardy Pottinger
04c60ba939 [DS-3217] modified DCDateTest and InstallItemTest to use Calendar instead of Date for getting a date value for testing, explicitly set the timezone of the new calendar object to UTC, thanks to LuizClaudioSantos and Helix84 for the hints 2016-07-13 11:03:44 -05:00
Hardy Pottinger
462360ed4d [DS-3217] trying the suggestion from LuizClaudioSantos, use Calendar and not Date 2016-07-13 10:58:46 -05:00
Hardy Pottinger
c6fda557f7 [DS-3250] applying patch provided by Atmire 2016-07-06 16:16:38 -05:00
Tim Donohue
e73f83f7a4 Merge pull request #1438 from aschweer/DS-3246-cocoon-recycling-ds5
[DS-3246] Improve cleanup in recyclable components
2016-06-29 14:40:55 -05:00
Andrea Schweer
9f0f5940e7 [DS-3246] Improve cleanup in recyclable components 2016-06-15 14:33:35 +01:00
Mark H. Wood
88ed833e2c Merge pull request #1017 from bram-atmire/dspace-5_x
[DS-2702] Cannot send email using SSL (5.x branch)
2016-04-06 12:36:14 -04:00
Ivan Masár
91d4081b03 DS-2874 make XSD enforce missing Description element 2016-04-01 17:15:35 +02:00
Tim Donohue
d9e986d669 [maven-release-plugin] prepare for next development iteration 2016-03-18 15:32:25 -05:00
Tim Donohue
132f37a10a [maven-release-plugin] prepare release dspace-5.5 2016-03-18 15:32:20 -05:00
Mark H. Wood
98a26fa3e7 [DS-3094] Apply attached patch 2016-03-18 14:11:53 -04:00
Ivan Masár
4f5f5acdbe DS-3085 fix style in Mirage 2016-02-29 16:17:17 +01:00
AmberPoo1
212011cc75 Correct Sherpa/Romeo ungraded journal (gray) error message
https://jira.duraspace.org/browse/DS-3085

Resolved: DSpace DS-3085 (Ticket in the Space Issue Tracker),
Sherpa/Romeo ungraded journal (gray) shows error
2016-02-29 16:17:14 +01:00
Andrea Bollini
e7b49d8310 Merge pull request #1303 from abollini/dspace-5_x
DS-3063 Add missing license headers
2016-02-16 11:58:25 +01:00
Andrea Bollini
a70f0bdd22 DS-3063 Add missing license headers 2016-02-16 11:50:42 +01:00
Andrea Bollini
a84763a258 DS-3063 Ensure proper access to news files 2016-02-16 10:53:08 +01:00
Ivan Masár
5a1028a7a9 DS-2517 replace erroneous sql column with correct column
fixes previous commit
2016-02-15 15:26:38 +01:00
helix84
16b123e9df Merge pull request #1300 from DylanMeeus/dspace-5_x-sqlfix
replace erroneous sql column with correct column
2016-02-15 14:32:05 +01:00
dylan
f057ed8c07 replace erroneous sql column with correct column 2016-02-15 14:18:21 +01:00
Christian Scheible
875bb59eb0 [DS-2820] fixes XOAI Not filter 2016-02-12 00:06:59 +01:00
cjuergen
2c09aea8fd DS-3050 XOAI wrong URL encoding 2016-02-09 13:02:34 +01:00
Christian Scheible
533245c8dd [DS-2426] added possibility to use relative import in oai xslt transformers 2016-01-30 17:05:58 +01:00
AmberPoo1
875bba3add Correct dcterm "dcterms.conformsTo" in registry configuration
Resolved: DSpace DS-2998, Incorrect metadata element
"dcterms.comformsTo" in dspace registry configuration.
Ticket in the Space Issue Tracker: DS-2998
2016-01-22 08:45:46 +01:00
Ivan Masár
55e623d1c2 fix X-Forward-For -> X-Forwarded-For in dspace.cfg 2015-12-30 09:36:43 +01:00
cjuergen
81a6d173ca Fix for DS-2968 2015-12-23 14:00:09 +01:00
helix84
3ff604742b Merge pull request #1209 from bram-atmire/DS-2936
DS-2936 REST-API /handle endpoint broken
2015-12-14 13:23:09 +01:00
Hardy Pottinger
3bfe7b8ea8 Merge pull request #1215 from bram-atmire/DS-2946-2
Adding class to ensure REST API can register itself during startup
2015-12-11 15:21:46 -05:00
Bram Luyten
ee62f9d6f0 Adding class to ensure REST API can register itself during startup 2015-12-11 00:22:16 +01:00
Bram Luyten
be35b0450b Removing problematic static context 2015-12-07 10:31:02 +01:00
Bram Luyten
8c94edc29c DS-2936 Adding breaks and context completion 2015-12-06 20:00:47 +01:00
helix84
2bf0275678 Merge pull request #1199 from TimothyXL/DS-2893
DS-2893 Mirage2: printing an item page includes the URL to the bitstreams
2015-11-30 16:43:42 +01:00
Tim Van den Langenbergh
86ca33eaa3 DS-2893
Quick fix for printing URLs.
2015-11-30 10:36:56 +01:00
Pascal-Nicolas Becker
f64d4b3367 DS-2923: Update DataCite default configuration. 2015-11-27 12:39:20 +01:00
Tim Donohue
c908997900 [maven-release-plugin] prepare for next development iteration 2015-11-09 14:33:39 -06:00
Tim Donohue
e2dd1089c9 [maven-release-plugin] prepare release dspace-5.4 2015-11-09 14:33:33 -06:00
Peter Dietz
8809150e66 Merge pull request #1116 from arnodb/rest-dto-sanity-5_x
DS-2829: Add the logo setter to the Community DTO - 5_x
2015-11-06 11:32:40 -05:00
Jonas Van Goolen
1fd2723848 Addition of spacing to fix gear icon sticking to results 2015-11-06 10:17:09 -06:00
Jonas Van Goolen
454f40b3f4 DS-2637 Retention of offset when changing the rpp
The offset which was obtained before changing the rpp is kept instead of snapping to the first page

Based on the jira-ticket + comment
https://jira.duraspace.org/browse/DS-2637#comment-45376
2015-11-06 10:16:55 -06:00
Jonas Van Goolen
f05c9e794f DS-2637 Possibility for users to configure the discovery browse page on the fly
Modifications to the following files:
-searchFacetFilter.java
-browse.js
-attribute-handles.xsl
-browse.xsl
These enable the user to change the pagination through the gear icon, as was already the case for other browse pages.

This was an improvement based on the following Jira ticket:
https://jira.duraspace.org/browse/DS-2637
2015-11-06 10:16:41 -06:00
Ivan Masár
56fc41cac3 DS-2871 fix viewport meta tag 2015-11-06 11:05:08 -05:00
Ivan Masár
0175e5edff DS-2871 mobile theme: correct order of imports 2015-11-06 12:15:47 +01:00
Hardy Pottinger
d17886c1cd Merge pull request #1151 from tdonohue/DS-2869
DS-2869 : Refactor SolrServiceImpl to always specify a list of fields for Solr to return
2015-11-05 22:27:34 -05:00
Chris Wilper
06668c363e DS-1207 Stop throwing ResourceNotFoundException for redirects 2015-11-05 17:37:13 +00:00
Tim Donohue
4b3a07120c DS-2869: Ensure all Solr queries specify fields to return. Refactor slightly to use global constants for objId fields. Add comments to DiscoverQuery. 2015-11-05 10:37:11 -06:00
Tim Donohue
50c4a54bd6 Remove odd characters from search schema config 2015-11-05 10:36:25 -06:00
Andrea Schweer
0aabf5d780 DS-2699 Only escape colon-space, not other characters
Escaping all characters with special meaning in Solr query syntax prevents
users from doing advanced searches, including those with explicit operators
(AND/OR), range queries and NOT-type queries. The colon character quite
commonly occurs in titles of research publications, where it is typically
followed by a space. Unfortunately, it's regarded as a special character by
Solr and would normally need to be escaped when it's just part of a phrase
search (not part of a fielded search). Escaping the colon character _only_ when
it's followed by a space character still allows the user to manually enter
fielded queries (eg -fulltext:[* TO *] to find all items without fulltext). At
the same time, queries where someone pastes in a publication title containing
colon-space still "just work".
2015-11-04 18:31:07 +00:00
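
Editor's note: a minimal sketch (not the actual DSpace code) of the escaping rule DS-2699 describes: only a colon followed by a space is escaped, so fielded queries keep working.

```java
public class ColonSpaceEscape {
    // Escape ":" only when it is followed by a space, leaving fielded
    // queries such as -fulltext:[* TO *] untouched.
    static String escapeColonSpace(String query) {
        return query.replace(": ", "\\: ");
    }

    public static void main(String[] args) {
        System.out.println(escapeColonSpace("Dark matter: a review"));  // Dark matter\: a review
        System.out.println(escapeColonSpace("-fulltext:[* TO *]"));     // unchanged
    }
}
```
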
helix84
04ce6ff2f4 Revert "DS-1821 Internationalize the bitstream access icon alt text" 2015-11-04 16:43:03 +00:00
Peter Dietz
1f8f6241c2 fix /{collection_id}/items to properly process offset parameter 2015-11-04 11:22:21 -05:00
Peter Dietz
4a2f392ed8 Merge pull request #1122 from arnodb/DS-2831-database-connections-cleanup
DS-2831 connections cleanup and context reuse
2015-11-04 11:14:04 -05:00
Peter Dietz
fac705ec3f Merge pull request #1121 from arnodb/DS-2830-rest-authentication-safety
DS-2830 add proper synchronization in TokenHolder
2015-11-04 10:25:02 -05:00
Tim Donohue
e1263249f5 Merge pull request #1146 from tdonohue/DS-2542
DS-2542: Fix bug for day granularity (from zuki's PR#912)
2015-11-04 09:12:58 -06:00
Pascal-Nicolas Becker
553b1a72c5 Merge pull request #1145 from tdonohue/DS-2736
DS-2736 and DS-2737: Ensure all string parameters are properly escaped in results
2015-11-04 15:46:48 +01:00
Tim Donohue
6242865207 DS-2737: Escape message keys which are passed in as url params 2015-11-03 15:45:26 +00:00
Tim Donohue
59fa31641a DS-2542: Fix bug for day granularity (from zuki's PR#912) 2015-11-03 14:47:35 +00:00
Tim Donohue
58344b610f DS-2736: Ensure all string parameters are escaped in results 2015-11-02 22:26:23 +00:00
Andrea Schweer
563d90f7c4 [DS-2858] Revert "DS-2424 workaround for bug in xoai library. changed ref to red for Filter in Contexts"
This reverts commit 16b45601f1.

The workaround is no longer needed with xoai 3.2.10.x, where the parsing bug is no longer present.
2015-11-02 21:01:35 +00:00
Andrea Schweer
131555604a [DS-2865] Upgrade xoai dependency version & groupId 2015-11-02 20:26:03 +00:00
Andrea Schweer
fbde108024 Merge pull request #1049 from pmarrone/DS-2744-oai-base-url
[Ds-2744] Accept the dspace.oai.url as baseUrl for Dspace OAI
2015-11-02 22:29:36 +13:00
Andrea Schweer
2c59a9dd35 Merge pull request #1132 from tdonohue/DS-2408-5x
DS-2408: Fix ordering of listeners in web.xml to ensure Kernel always starts before DB migrations happen
2015-11-02 22:09:05 +13:00
Andrea Schweer
d307c56d07 DS-2698 Use all result info in browse page cache validity
Stepping through the code in a debugger showed that the singleEntry arrays
typically hold three entries: the metadata value in [0], the authority key (if
present) in [1] and the frequency of this metadata value in the current browse
in [2]. The validity used to take into account only [0] and [1], probably from
the old lucene browse (which didn't include frequencies).

This change ensures that the cache validity object includes all entries in the
singleEntry arrays by avoiding to pick out individual array entries.
2015-10-28 20:51:11 +00:00
Peter Dietz
1d2b954889 Merge pull request #1007 from ufal/DS-2692
DS-2692 REST-API /handle not reflecting updates
2015-10-28 15:29:11 -04:00
Tim Donohue
69cfc61167 DS-2408: Fix ordering of listeners in web.xml to ensure Kernel always starts before DB migrations. Add comments to others. 2015-10-28 15:51:01 +00:00
bill
b944ceb112 add left hand variable, 'fileDescription', to catch a value. 2015-10-28 15:32:15 +00:00
Jonas Van Goolen
9885ed851a DS-2733 Erroneous string comparison fix 2015-10-28 15:25:01 +00:00
Peter Dietz
52ce1eb52b Merge pull request #1100 from akotynski/DS-2784
[DS-2784] jersey version changed to 1.19
2015-10-28 11:15:31 -04:00
Tim Donohue
deeef45943 Limit requiresIndexing() query to only returning the LAST_INDEXED_FIELD 2015-10-28 14:49:17 +00:00
Hardy Pottinger
ad21875ac8 [DS-2826] changed arrow characters for navigation to improve legibility,
increased the navigation size to be .larger
2015-10-27 17:20:05 -05:00
Arnaud de Bossoreille
4ee79a3d89 Add the logo setter to the Community DTO
That fixes potential deserialization issues.
This is covered by a test on the generated JAXB schema.
2015-10-22 22:21:23 +02:00
Arnaud de Bossoreille
c01c3af153 DS-2831 connections cleanup and context reuse 2015-10-22 17:28:41 +02:00
Arnaud de Bossoreille
f493a475fd DS-2830 add proper synchronization in TokenHolder 2015-10-22 17:02:57 +02:00
Hardy Pottinger
a3a5f562c9 [DS-2706] removing configuration for more lib folders in authority solr config (dist and contrib) to match Discovery/search solr config 2015-10-21 17:49:45 +00:00
Hardy Pottinger
3479b0a254 [DS-2706] removing more unused lib configurations from authority solr config 2015-10-21 17:49:35 +00:00
Hardy Pottinger
39289b6762 [DS-2706] removing more unused lib configurations from authority solrconfig 2015-10-21 17:49:24 +00:00
Hardy Pottinger
edf7ea6524 removed unused example lib folders from the Authority Solr config 2015-10-21 17:49:14 +00:00
Pascal-Nicolas Becker
2045fee8ab Fixes html5 file upload during workflow (if allowed by configuration). 2015-10-21 17:47:30 +00:00
Christian Scheible
bac9beaffa [DS-2813] fixed NullPointer in submission lookup if pubmed is down. 2015-10-19 13:44:11 +02:00
Mark H. Wood
569ad5f546 [DS-2502] Correct some dependencies to prevent pulling in servlet-api 2015-10-16 19:32:05 +00:00
Andrea Schweer
b465f26646 [DS-2591] Fix link to respond.min.js 2015-10-16 13:46:43 +02:00
Andrea Schweer
ad19c3aeb6 [DS-2592] Fix OpenUrl description.xml link 2015-10-16 08:09:58 +02:00
Hardy Pottinger
34c20d49ad [DS-2790] removed duplicate log4j config lines for Solr 2015-10-13 14:14:24 +00:00
aleksanderkotbury
eaa08adb62 [DS-2784] jersey version changed to 1.19 2015-10-09 16:55:13 +02:00
Peter Dietz
15f3c247bc DS-2740 Conditionally show publisher for sherpa romeo, if present 2015-10-08 09:54:17 +02:00
Brad Dewar
2a44765f39 Add dc.creator to qdc.xsl output 2015-10-07 19:41:39 +00:00
Brad Dewar
87c34f1f1c Add dc.creator to qdc.xsl output 2015-10-07 19:41:29 +00:00
Andrea Schweer
fce84880bc DS-1924 Include current locale in calculating cache key
This ensures that the value of the 'current locale' page meta element reflects locale switches made by the user.
2015-09-28 12:48:30 +02:00
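
Editor's note: a tiny illustration of the DS-1924 principle above (not the Cocoon cache code): folding the current locale into the cache key so a locale switch never serves a page cached under the previous locale.

```java
import java.util.Locale;

public class LocaleCacheKeyExample {
    // Combine the page identifier with the active locale to form the cache key.
    static String cacheKey(String page, Locale locale) {
        return page + "#" + locale.toLanguageTag();
    }

    public static void main(String[] args) {
        System.out.println(cacheKey("/community-list", Locale.ENGLISH)); // /community-list#en
        System.out.println(cacheKey("/community-list", Locale.GERMAN));  // /community-list#de
    }
}
```
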
Roeland Dillen
3f94c3acb4 set mail.extraproperties before obtaining session 2015-08-13 22:48:43 +02:00
Ondřej Košarko
50cb865ea2 Extend Handle from Resource and use createContext similarly to other *Resource 2015-08-06 14:48:47 +02:00
RomanticCat
a9b8d8bfbc DS-2533: update version for Eirslett frontend-maven-plugin 2015-08-06 09:30:12 +02:00
Mark H. Wood
600f680cd6 Make the assembly descriptor schema-valid. 2015-08-04 21:07:58 +00:00
Mark H. Wood
01d7d060d7 [DS-2694] Avoid munging more binary types during release 2015-08-04 21:07:49 +00:00
Christian Scheible
4a6663c2f4 [DS-2679] Retain ordering of authors for Google Scholar metatags. 2015-08-04 13:13:55 +02:00
Jonas Van Goolen
b3c87b2be7 DS-2672 Typos in constants in DOIIdentifierProvider 2015-08-03 14:17:54 +02:00
Kim Shepherd
ac08b6a4e3 [maven-release-plugin] prepare for next development iteration 2015-07-29 09:30:59 +00:00
Kim Shepherd
a2f5fe34eb [maven-release-plugin] prepare release dspace-5.3 2015-07-29 09:30:55 +00:00
Kim Shepherd
ace19199e5 Updates to license files for 5.3 2015-07-29 08:18:54 +00:00
Tim Donohue
6d9fa26535 Minor fix. Since addBitstream() inherits policies, we need to first remove inherited custom policies 2015-07-28 11:42:41 -05:00
Pascal-Nicolas Becker
3efe549774 DS-2358: Preserves custom policy rules during versioning 2015-07-28 11:42:31 -05:00
Andrea Schweer
734744ec4f DS-2571 Fix jumping to value in descending browse 2015-07-23 14:37:44 -05:00
rradillen
829c30bab4 change default of findAuthorizedPerformanceOptimize
it is now false
2015-07-22 19:36:04 +00:00
rradillen
83cb04ed53 move Collection policy optimization property 2015-07-22 19:35:54 +00:00
rradillen
0911d60290 [DS-2527] Disable collection authorisation enumeration optimisation
Disable collection authorisation enumeration optimisation so LDAP and Shibboleth may work out of the box.
2015-07-22 19:35:43 +00:00
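
Editor's note: the DS-2527 change above turns the optimisation off by default. Below is a short sketch of how such a boolean switch is typically read through ConfigurationManager; the full property key shown is an assumption based on the name mentioned in the commits.

```java
import org.dspace.core.ConfigurationManager;

public class CollectionAuthOptimizationFlag {
    public static void main(String[] args) {
        // Assumed full key; the commits above only name the property
        // "findAuthorizedPerformanceOptimize" and say it now defaults to false.
        boolean optimize = ConfigurationManager.getBooleanProperty(
                "org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false);

        System.out.println("findAuthorizedPerformanceOptimize = " + optimize);
    }
}
```
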
Pablo Buenaposada
9bb7036857 closing more span tags
Missing closing more span tags
2015-07-22 19:24:13 +00:00
pablobuenaposada
e0368f3ade fixed dots in upper pagination 2015-07-22 19:22:59 +00:00
Tim Donohue
660217c3f9 Add a new DSpaceWithdrawnFilter in place of hardcoding in DSpaceAuthorizationFilter. Update configs, and fix bugs. 2015-07-22 19:13:28 +00:00
Tim Donohue
5f13b8cc64 Update XSL stylesheet to display "deleted" status, and correct misspellings 2015-07-22 19:13:15 +00:00
Tim Donohue
a2caabc79a Fix DS-2593 : Withdrawn items are now given a "tombstone" in OAI-PMH. Also fix Context mgmt issues & authorization code. 2015-07-22 19:13:05 +00:00
Hardy Pottinger
cb9710cda4 Merge pull request #993 from ufal/DS-2658
DS-2658: Fix wrong mapping for dc metadata in html head
2015-07-22 14:07:55 -05:00
Ondrej Kosarko
56abebaece fixes DS-2658 2015-07-17 10:13:37 +02:00
Pascal-Nicolas Becker
0310db74aa DS-2614: Ignore custom resource policies for unfinished items 2015-07-15 14:48:10 +00:00
Tim Donohue
3e1bac69df Escape special characters in JSPUI queries as well 2015-07-15 14:19:21 +00:00
Tim Donohue
ec86af5a82 Also ensure query escaped in AbstractSearch 2015-07-15 14:19:11 +00:00
Tim Donohue
79e111996b Fix DS-2602 and DS-2461 by properly escaping user entered queries 2015-07-15 14:19:00 +00:00
Tim Donohue
f4c6f2680c Revert "DS-2461 Escape some colons in queries"
This reverts commit 2575d73b2d.
2015-07-15 14:18:49 +00:00
nicolasschwab
f3487be040 DS-2603: now if the item doesn't have a primary bitstream the value of citation_pdf_url metadata will be a link to the first public bitstream according to the order established by the user. 2015-07-07 16:35:41 +00:00
Tim Donohue
87d0770974 Merge pull request #979 from bram-atmire/dspace-5_x
DS-2560 - Did you mean option missing in Mirage2 (backport to 5.x branch)
2015-07-06 17:24:13 -04:00
junwei1229
1c9fa656aa fix the OAI index issue when submitter is null
When the submitter is null it causes an NPE and stops the OAI indexing process, e.g. when using harvesting the submitter will probably be null in the database.
2015-07-06 21:04:22 +00:00
cjuergen
59ff964f4f Fix for DS-2543 cleans the cached OAI responses after doing a full item import with the -c option 2015-07-06 15:42:00 +00:00
Hardy Pottinger
10c4661885 Merge pull request #955 from Mini-Pillai/DS-2560
DS 2560 - Did you mean option missing in Mirage2
2015-06-30 08:46:48 +02:00
Ivan Masár
afe9c1294f test DSpace build on Travis container infrastructure 2015-06-23 13:45:00 +00:00
Bram Luyten
7a54972ed1 Merge pull request #965 from ufal/DS-2620
DS-2620 typo in the word cocoon
2015-06-20 14:33:00 +02:00
Ondrej Kosarko
b2cb0ef4dd typo in the word cocoon 2015-06-18 10:02:07 +02:00
Àlex Magaz Graça
5edf641d6c DS-2594 Long file names overlap the second column of item metadata in Mirage 2 2015-06-16 14:48:14 +02:00
Roeland Dillen
d9b14a86f0 DS-2618: Process mail.server.disabled in test-email 2015-06-16 14:43:21 +02:00
Andrea Schweer
7b8fa49632 DS-2598 Correct XPATH for available date in mets format xoai
This ensures that dc.date.available is shown when using the mets metadata
format in OAI-PMH. Previously, the dateAvailable element was present but empty.
2015-06-02 15:33:36 +02:00
Mark H. Wood
b5540d5999 Merge pull request #951 from mwoodiupui/DS-2590
[DS-2590] Fix multiple issues with distributed archives.
2015-05-26 12:29:19 -04:00
Mark H. Wood
494ff0c4c1 [DS-2590] Improved commentary about these obscure problems. 2015-05-26 12:14:58 -04:00
Mark H. Wood
1c4c8943a9 [DS-2590] Roll back problematic upgrade of maven-assembly-plugin 2015-05-26 11:53:57 -04:00
Mark H. Wood
5cd56fb834 [DS-2590] Fix multiple issues with distributed archives.
Include dspace/modules/*/src/main/webapps so build succeeds.
Avoid damaging a sample ZIP archive by munging "line endings".
Upgrade to maven-assembly-plugin 2.5.4 (which uncovered the line
ending problem).
2015-05-22 14:45:34 -04:00
Ondřej Košarko
ed89d6b00e DS-2587 Resource policies rptype is null after upgrading 2015-05-21 17:18:46 +02:00
Hardy Pottinger
19b28f4734 [maven-release-plugin] prepare for next development iteration 2015-05-20 11:42:40 -05:00
Hardy Pottinger
4a8fdf6843 [maven-release-plugin] prepare release dspace-5.2 2015-05-20 11:42:37 -05:00
Tim Donohue
d040b9dd4e Fix DS-2582: Remove all milliseconds from dates. Refactor code slightly. 2015-05-19 14:01:32 -05:00
Ondřej Košarko
4036bf781a DS-2020 null check & turning _ to / in handles 2015-05-18 11:34:55 -05:00
Antoine Snyers
d011e24f74 DS-2529 CSV import bugfix for fields under authority control with a language 2015-05-18 11:07:57 -05:00
Tim Donohue
0e9f78e9df Fix for DS-2577. Query for PostgreSQL constraint name. Surround with double quotes if value is $1 or similar. 2015-05-15 18:18:27 +00:00
Christian Scheible
254097b2e2 [DS-2546] added missing curly brackets 2015-05-15 18:01:52 +00:00
Christian Scheible
8049cef23b [DS-2546] Added missing ZULU time zones where applicable 2015-05-15 18:01:43 +00:00
Christian Scheible
de842dbf30 [DS-2546] fixes problem in DateUtils parsing 2015-05-15 18:01:33 +00:00
Ivan Masár
8bcac58154 minor fix: remove extra colon in string 2015-05-15 10:07:21 +02:00
Àlex Magaz Graça
511b78277f [DS-2186] Request item copy doesn't always use RequestItemAuthorExtractor 2015-05-15 16:37:18 +12:00
KevinVdV
dbd019943a [DS-2131] SWORDv2 ingestion fails with NullPointerException when replacing a non archived item 2015-05-15 15:07:57 +12:00
Pascal-Nicolas Becker
7d8a9d5636 DS-1965: Adds date fields to the form used to edit policies (JSPUI). 2015-05-15 10:30:36 +12:00
ctu-developers
2ab6b10a03 Removed unnecessary changes from previous commit. 2015-05-14 16:42:45 -05:00
ctu-developers
cd7789e8df Fix of getLink() by returning servlet context.
Added static method to Resource.java and using it in DSpaceObject.java
2015-05-14 16:42:35 -05:00
Ivo Prajer
9287aa891f Quick fix of getLink() 2015-05-14 16:42:22 -05:00
Pascal-Nicolas Becker
a99203382c DS-2551: JSPUI show thumbnails only if user has read permission 2015-05-14 16:32:21 -05:00
tmtvl
6ec649df78 DS-2562, fix incorrect if statement. 2015-05-14 21:22:29 +00:00
Mark H. Wood
e9f4e4c2cc [DS-2379] Lists returned by JDOM can't be sort()ed, so use a more cooperative class. 2015-05-14 13:40:50 -04:00
Mark H. Wood
18cc6bb3ff [DS-2379] Sort the list of commands by their names 2015-05-14 13:40:50 -04:00
ctu-developers
8094d8fe18 DS-2511: Repaired resource policy endpoints
Repaired all endpoints in REST api.
Added XML annotation in ResourcePolicy.
Repaired bug in Bitstream with expand field.
Repaired creating ResourcePolicy with bitstream.
2015-05-14 16:21:28 +02:00
Andrea Schweer
b7a469d53c DS-2575 Ensure pooled/owned workflow task are listed in fixed order 2015-05-14 15:40:52 +12:00
Andrea Schweer
f168c6c33d DS-2461 Escape some colons in queries
This allows searching for titles with colons while still allowing fielded searches
2015-05-14 11:13:37 +12:00
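As a rough, hypothetical sketch of that idea (not the actual DS-2461 patch; the list of known index fields below is an assumption for illustration only):

import java.util.Arrays;
import java.util.List;

public class ColonEscaper {
    // Hypothetical list of index fields that should still allow fielded searches
    private static final List<String> KNOWN_FIELDS = Arrays.asList("title", "author", "subject", "dc.title");

    // Escape colons so a query like "Survival: a novel" is treated as plain text,
    // while "title:survival" keeps its fielded meaning.
    static String escapeColons(String query) {
        int colon = query.indexOf(':');
        if (colon > 0) {
            String prefix = query.substring(0, colon);
            if (KNOWN_FIELDS.contains(prefix.toLowerCase())) {
                return query; // looks like a fielded search, leave it alone
            }
        }
        return query.replace(":", "\\:");
    }

    public static void main(String[] args) {
        System.out.println(escapeColons("Survival: a novel")); // Survival\: a novel
        System.out.println(escapeColons("title:survival"));    // title:survival
    }
}
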
Pascal-Nicolas Becker
981b62d9e9 DS-2545: Show only the collections the user can submit items to.
The JSPSelectCollectionStep and JSPStartSubmissionLookupStep already set
the collections the user can submit to as a request attribute, and the JSPs
already use this attribute. This commit lets the SelectCollectionTag use
this attribute too, instead of looking up the collections on its own.
2015-05-13 17:10:09 -05:00
Andrea Schweer
2c42d71a6a DS-2544 Delete temp files when exception is encountered
As suggested by William Tantzen on Jira.
2015-05-14 08:48:52 +12:00
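The pattern described in the commit above, as a minimal standalone sketch (the file prefix and the processing step are hypothetical; the real change lives in the thumbnailing code):

import java.io.File;
import java.io.IOException;

public class TempFileCleanup {
    public static void main(String[] args) throws IOException {
        // Create the temporary file up front, then guarantee cleanup even if
        // processing (e.g. thumbnail generation) throws an exception.
        File tmp = File.createTempFile("im-thumbnail", ".jpg");
        try {
            process(tmp); // hypothetical processing step that may throw
        } finally {
            if (!tmp.delete()) {
                System.err.println("Unable to delete temporary file " + tmp.getAbsolutePath());
            }
        }
    }

    private static void process(File file) {
        // placeholder for the real work (e.g. an ImageMagick conversion)
    }
}
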
Andrea Schweer
ca6bc57c6d [DS-2544] Improve temp file handling for IM thumbnailer 2015-05-14 08:48:52 +12:00
Andrea Schweer
0f0be17d0a [DS-2549] Render Repo identifier / Sample identifier on Identify page 2015-05-14 08:44:06 +12:00
Panagiotis Koutsourakis
5e5a7922d0 Fix a bug in the "Jump to" browse feature
When computing the offset for the "jump to" feature in
SolrBrowseDAO.doOffsetQuery, we should take into account whether we are
browsing a subset of the items (e.g. viewing the items that have a
specific subject) rather than all of them (e.g. browsing by title).
2015-05-13 15:42:07 -05:00
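A hedged sketch of the idea using the SolrJ API that DSpace's Discovery code builds on (not the actual SolrBrowseDAO.doOffsetQuery implementation; the field names and the example filter are hypothetical, and the SolrJ library is assumed to be on the classpath):

import org.apache.solr.client.solrj.SolrQuery;

public class JumpToOffsetSketch {
    // Count how many entries precede the "jump to" value, applying the same filter
    // that scopes the browse (e.g. a subject facet) rather than only the sort field.
    static SolrQuery offsetQuery(String sortField, String jumpToValue, String scopeFilter) {
        SolrQuery query = new SolrQuery("*:*");
        // everything strictly before the jump-to value on the sort field
        query.addFilterQuery(sortField + ":[* TO \"" + jumpToValue + "\"}");
        if (scopeFilter != null) {
            // e.g. "subject_filter:History" when browsing items with a specific subject (hypothetical)
            query.addFilterQuery(scopeFilter);
        }
        query.setRows(0); // only the result count is needed for the offset
        return query;
    }
}
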
Andrea Schweer
bb4cb39373 Remove box styling for file upload in Mirage 2 2015-05-14 08:39:24 +12:00
Andrea Schweer
a257f516fa DS-2449 Restore template item label for Mirage 2 2015-05-14 08:39:24 +12:00
Andrea Schweer
9d8284d85f [DS-2212] Ignore _version_ field when sharding solr stats 2015-05-13 15:13:35 -05:00
Christian Scheible
57efa4f628 [DS-2423] Added oai overlays to classpath to ensure that oai command line tools work (like clean-cache) 2015-05-06 18:49:56 +02:00
Christian Scheible
5b5f44085a [DS-2423] changed architecture of DSpace filters. Removed need for @Autowire because filters are not handled by Spring. 2015-05-06 18:49:56 +02:00
Christian Scheible
46ce2741bc [DS-2423] changed tests for OAI-interface to autowire the Filters. 2015-05-06 18:49:56 +02:00
Christian Scheible
0b799fc882 [DS-2423] Added possibility to create additional Filter for DSpace OAI-PMH interface 2015-05-06 18:49:56 +02:00
Andrea Schweer
04b57a60b3 [DS-2486] Increase robustness, improve directory delete behaviour
As suggested by Mark Wood, delete directories only when they were actually
created (and when --keep is not given).

Also refuse to go ahead with reindexing when it's obvious that there won't be
enough space, plus deal a little more gracefully with common errors (initial
export failing; -temp core still left from previous attempt).
2015-05-01 12:02:04 -05:00
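A minimal sketch of the kind of free-space guard described above (illustrative only; the assumption that the export needs roughly the size of the existing core is mine, not DSpace's):

import java.io.File;

public class ReindexSpaceCheck {
    // Refuse to start a reindex when the export location obviously lacks space.
    // Here we assume the export needs roughly as much space as the existing core.
    static boolean enoughSpaceFor(File solrCoreDir, File exportDir) {
        long estimatedNeed = directorySize(solrCoreDir);
        return exportDir.getUsableSpace() > estimatedNeed;
    }

    static long directorySize(File dir) {
        long total = 0;
        File[] children = dir.listFiles();
        if (children != null) {
            for (File child : children) {
                total += child.isDirectory() ? directorySize(child) : child.length();
            }
        }
        return total;
    }
}
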
Andrea Schweer
02b4314046 Disable version check when importing temporary stats data 2015-05-01 12:01:48 -05:00
Andrea Schweer
3d79fa76ab Make import/export of temporary core more robust 2015-05-01 12:01:33 -05:00
Andrea Schweer
ca1803ae93 Use stats component to get minimum date 2015-05-01 12:01:21 -05:00
Andrea Schweer
9046ec21d4 Export monthly batches to limit the number of docs to sort 2015-05-01 12:01:06 -05:00
Andrea Schweer
b30654e3d5 DS-2486 Add Solr import/export to launcher.xml 2015-05-01 12:00:53 -05:00
Andrea Schweer
ee19e11e6d DS-2486 New scripts to export/clear/import solr indexes 2015-05-01 12:00:34 -05:00
Andrea Schweer
a990c97959 DS-2486 Add UUID processing to CSV, JSON update handlers too 2015-05-01 12:00:18 -05:00
Pascal-Nicolas Becker
56816b13ba Merge pull request #926 from tuub/DS-2550-dspace-5_x
DS-2550: fix ImageMagick/Ghostscript problems with transparent pdfs
2015-04-27 14:30:25 +02:00
Pascal-Nicolas Becker
b414aaa195 DS-2550: fix ImageMagick/Ghostscript problems with transparent pdfs 2015-04-27 14:24:00 +02:00
Ivo Prajer
1a1ae35ec9 DS-2218: Unable to use command "update-handle-prefix"
* Removed extra semicolon in the SQL command.
* Added check for "up-to-date".
* Fixed updating metadata values.
* Added basic logging to the DSpace log and SQL exception handling.
* Changed, customized, and added user messages and repaired their order.
* Fixed return codes and some typos.
* Changed the re-index engine from DSIndexer to Discovery and updated the info text about manual re-indexing.
* Cleaned up SQL formatting and code formatting.
2015-04-22 11:28:46 +02:00
Ivo Prajer
1029f393e4 Fix passing parameters LIMIT and OFFSET to the SQL query in the method getItems() for Oracle 2015-04-20 10:47:22 +02:00
Ivo Prajer
c1039dfe26 Fix passing parameters LIMIT/OFFSET to the SQL query in findAll() 2015-04-20 10:47:11 +02:00
Ivan Masár
cc96646e37 DS-2474 METS format in OAI includes only the first author 2015-04-20 09:44:17 +02:00
Ivan Masár
d2ad7c81de DS-2491 set deletedRecord=transient in OAI Identify 2015-04-20 09:12:45 +02:00
Bram Luyten
00e9c1131f DS-2531 New entries for the robots hostname list 2015-04-20 09:09:37 +02:00
Chris Wilper
77cc9abe49 fix year and capitalization in displayed copyright 2015-04-16 20:28:45 +02:00
Mark H. Wood
91018bfe0f Merge pull request #909 from mwoodiupui/DS-2518-5x
[DS-2518] EZID DOI IdentityProvider doesn't set the location in metadata
2015-04-08 12:53:55 -04:00
Mark H. Wood
7f9bcb283f Repair testing environment, enable real unit tests, add test of metadata crosswalking. 2015-04-08 11:49:35 -04:00
Mark H. Wood
ae11c1c795 Add location metadata so that the DOI actually resolves properly. 2015-04-08 11:49:35 -04:00
cjuergen
9cd5fa596b Fix for DS-2482 adds the attribute dspace.community or dspace.collection to the search and browse request if we browse or search a community or collection 2015-04-08 17:24:02 +02:00
rradillen
e10b10224a [DS-2532] botness of a visit is not properly logged when a location cannot be determined
Extracted the isBot check from the location == null conditional in two places.
2015-04-02 08:52:03 +02:00
Pascal-Nicolas Becker
e08886ae09 Merge pull request #898 from tuub/DS-2403-dspace-5_x
DS-2403: Resolves DS-2403 and reduce logging of RDFConsumer.
2015-03-25 19:42:30 +01:00
Pascal-Nicolas Becker
df3ffcf7f9 DS-2403: Resolves DS-2403 and reduce logging of RDFConsumer. 2015-03-25 19:14:20 +01:00
Andrea Schweer
0c77f7be91 [DS-2513] Improve multipart header parsing
The original code broke when files were uploaded whose name contains
the semicolon-space sequence.
2015-03-18 10:53:07 +01:00
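A hedged sketch of filename extraction that tolerates a semicolon-space sequence inside the name (not the actual DS-2513 change; shown only to illustrate why naive splitting on "; " breaks):

public class ContentDispositionParser {
    // Extract the filename parameter without splitting the whole header on "; ",
    // which breaks for filenames such as "report; final.pdf".
    static String filenameOf(String contentDisposition) {
        int start = contentDisposition.indexOf("filename=\"");
        if (start < 0) {
            return null;
        }
        start += "filename=\"".length();
        int end = contentDisposition.indexOf('"', start);
        return end < 0 ? null : contentDisposition.substring(start, end);
    }

    public static void main(String[] args) {
        String header = "form-data; name=\"file\"; filename=\"report; final.pdf\"";
        System.out.println(filenameOf(header)); // report; final.pdf
    }
}
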
David Cook
cdc8e3144e DS-2514 Packaged version of html5shiv.js is missing "main" element
Added "main" element to html5shiv.js, which should bring it inline
with the 3.6.2pre distribution from aFarkas's Github repo:
https://raw.githubusercontent.com/aFarkas/html5shiv/
3.6.2pre/dist/html5shiv.js

This can be verified by unminifying html5shiv.js in the above link
and html5shiv.js in DSpace master, and comparing them in any merge
program like vimdiff or WinMerge.

Without this patch, IE 8 self-closes the "main" element, and pushes
its child DIVs after it instead of nesting them within itself, which
has repercussions when styling the JSPUI with CSS.

With this patch, IE 8 comprehends the "main" element, and nests the
DIVs correctly.
2015-03-18 10:19:16 +01:00
Ivan Masár
92847079d7 Updated README.md 2015-03-16 18:59:08 +01:00
Ivan Masár
b023c36941 Updated README.md 2015-03-16 15:19:01 +01:00
ctu-developers
aee3b0b710 Updated README.md 2015-03-16 15:18:55 +01:00
Christian Scheible
d0c8afb601 DS-2424 workaround for bug in xoai library. changed ref to red for Filter in Contexts 2015-03-11 10:50:22 -05:00
Ivan Masár
e9c14bbcea DS-2501 fix SQL in REST /items/find-by-metadata-field 2015-03-09 22:17:14 +01:00
Àlex Magaz Graça
2eca19daa3 [DS-2493] "View more" link is shown even when there aren't more items. 2015-03-06 13:54:03 +01:00
Tim Donohue
bcc7a75baa DS-2483 : Fix misspelling of "sword.compatibility"
https://jira.duraspace.org/browse/DS-2483
2015-03-05 21:32:42 +00:00
Tim Donohue
19222e9341 DS-2477 : Ensure distribution packages always get created with Unix (LF) line endings 2015-02-27 17:48:12 +01:00
Tim Donohue
8124a61738 [maven-release-plugin] prepare for next development iteration 2015-02-25 12:27:33 -06:00
Tim Donohue
09007146d0 [maven-release-plugin] prepare release dspace-5.1 2015-02-25 12:27:24 -06:00
Tim Donohue
e715c64404 Updates to LICENSES_THIRD_PARTY for 5.1 2015-02-25 11:54:42 -06:00
Luigi Andrea Pascarelli
53ff4510ac [DS-2044] fix cross-site scripting vulnerability and minor related issues
(verbose error output, avoid NPE on JSP during an attack)
2015-02-23 20:27:43 +00:00
Tim Donohue
495031001d DS-2278 : Fix two issues in XMLUI which block proper 404 Page Not Found pages from displaying for some URL patterns 2015-02-23 12:48:21 -06:00
Mark H. Wood
97e89384f1 Don't close the current sitemap if we never opened one 2015-02-22 15:43:01 -06:00
cjuergen
72913cda76 Fix for DS-2419 JSP UI ignores authorization.admin.usage. The cause was an incomplete name for the configuration parameter which determines the accessibility of usage statistics. 2015-02-22 15:42:31 -06:00
Tim Donohue
03097aaa35 DS-2448 - Fix for JSPUI path traversal issue from Pascal-Nicolas Becker 2015-02-20 22:38:42 +00:00
Tim Donohue
f6d3f67b52 Add in missing imports from previous commit 2015-02-20 20:20:26 +00:00
Luigi Andrea Pascarelli
62e0ac462e DS-1702 add code to prevent XSS attack on recent submissions 2015-02-20 19:07:17 +00:00
Bill Tantzen
54310b014b fixed formatting 2015-02-20 12:30:14 -06:00
Bill Tantzen
beaf54f624 added synchronization for ArrayLists: agents and domains 2015-02-20 12:30:03 -06:00
Tim Donohue
114f1e0985 XMLUI path bug fixes and security fixes for DS-2445 DS-2130 DS-1896 2015-02-20 17:31:30 +00:00
Hardy Pottinger
1fdfe05c4c clarified the test expression, as a kindness to future generations 2015-02-17 14:45:42 -06:00
Hardy Pottinger
9c1f91d40b added back null handling, which I inadvertently dropped in favor of empty-string handling 2015-02-17 14:45:31 -06:00
Hardy Pottinger
39711b332f [DS-2034] refactored notnull method to notempty, at the suggestion of mhwood 2015-02-17 14:45:19 -06:00
Hardy Pottinger
6cfda147b4 [DS-2034] added empty-string handling to the notnull method in ControlPanel.java 2015-02-17 14:45:09 -06:00
Tim Donohue
eabdc610a0 Merge pull request #857 from tdonohue/DS-2427
Fix DS-2427 for 5.1 by consolidating problematic schema code into DatabaseUtils...
2015-02-11 15:51:25 -06:00
Tim Donohue
da74f5aa7e Add back in missing "canonicalize()" for Oracle 2015-02-04 10:58:11 -06:00
Tim Donohue
14c575a7c4 Fix DS-2427 for 5.1 by consolidating problematic code into DatabaseUtils.getSchemaName() so that it can be replaced easily in future. Also cleaned up config comments 2015-02-04 10:44:26 -06:00
Christian Scheible
d8c8d28c13 [DS-2438] fixed problem with immense metadata values for oai solr core 2015-02-04 10:05:50 +01:00
Pascal-Nicolas Becker
bf56f1f7e3 DS-640: Fixes Internal System Error if browse index is missing in JSPUI. 2015-02-03 15:29:21 +01:00
Pascal-Nicolas Becker
8046d154ee DS-2435: JSPUI sends 400 Bad Request for a nonexistent browse index. 2015-02-03 15:29:13 +01:00
ctu-developers
589117e204 Add canonicalize for "db.schema" property
DS-2201: Unable to complete installation of DSpace with a non-empty "db.schema" variable in the configuration file "build.properties"
2015-01-28 11:20:19 -06:00
Christian Scheible
e9e5423f97 [DS-2425] fixed typos in xoai.xml 2015-01-27 11:45:44 +01:00
Ondřej Košarko
c08f447cec ResumptionToken link for other verbs 2015-01-20 16:37:54 +01:00
Ivan Masár
cf25175155 [maven-release-plugin] prepare for next development iteration 2015-01-20 15:27:37 +01:00
406 changed files with 14385 additions and 6083 deletions

6
.dockerignore Normal file

@@ -0,0 +1,6 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*

6
.gitattributes vendored

@@ -1,6 +1,12 @@
# Auto detect text files and perform LF normalization
* text=auto
# Ensure Unix files always keep Unix line endings
*.sh text eol=lf
# Ensure Windows files always keep Windows line endings
*.bat text eol=crlf
# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain

84
.github/workflows/build.yml vendored Normal file

@@ -0,0 +1,84 @@
# DSpace 5.x Continuous Integration/Build via GitHub Actions
# Concepts borrowed from
# https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-java-with-maven
name: Build
# Run this Build for all pushes / PRs to current branch
on: [push, pull_request]
jobs:
  tests:
    runs-on: ubuntu-18.04
    env:
      # Give Maven 1GB of memory to work with
      # Suppress all Maven "downloading" messages in Travis logs (see https://stackoverflow.com/a/35653426)
      # This also slightly speeds builds, as there is less logging
      MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
    # These are the actual CI steps to perform per job
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v1
      # https://github.com/actions/setup-java
      - name: Install JDK 8
        uses: actions/setup-java@v1
        with:
          java-version: 8
      # https://github.com/actions/setup-ruby
      - name: Install Ruby v2.4 (for Mirage 2)
        uses: actions/setup-ruby@v1
        with:
          ruby-version: 2.4
      - name: Install Node.js v6.5 (for Mirage 2)
        shell: bash -l {0}
        run: nvm install 6.5.0
      # Install prerequisites for building Mirage2 more rapidly
      # Includes NPM, Bower, Grunt, Ruby, Sass, Compass
      # These versions should be kept in sync with ./dspace/modules/xml-mirage2/pom.xml
      - name: Install Mirage 2 prerequisites
        run: |
          sudo npm install -g npm@3.10.8
          npm --version
          sudo npm install -g bower
          sudo npm install -g grunt && sudo npm install -g grunt-cli
          grunt --version
          gem install sass -v 3.4.25
          sass -v
          gem install compass -v 1.0.1
          compass version
          gem env
      # https://github.com/actions/cache
      - name: Cache Maven dependencies
        uses: actions/cache@v2
        with:
          # Cache entire ~/.m2/repository
          path: ~/.m2/repository
          # Cache key is hash of all pom.xml files. Therefore any changes to POMs will invalidate cache
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-maven-
      # [Build & Unit Test] Check source code licenses and run source code Unit Tests
      # license:check => Validate all source code license headers
      # -Dmaven.test.skip=false => Enable DSpace Unit Tests
      # -DskipITs=false => Enable DSpace Integration Tests
      # -P !assembly => Skip normal assembly (as it can be memory intensive)
      # -B => Maven batch/non-interactive mode (recommended for CI)
      # -V => Display Maven version info before build
      # -Dsurefire.rerunFailingTestsCount=2 => try again for flaky tests, and keep track of/report on number of retries
      - name: Run Maven Build & Test
        run: mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2
      # [Assemble DSpace] Ensure assembly process works (from [src]/dspace/), including Mirage 2
      # -Dmirage2.on=true => Build Mirage2
      # -Dmirage2.deps.included=false => Don't include Mirage2 build dependencies (we installed them above)
      # -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
      - name: Assemble DSpace & Build Mirage 2
        run: cd dspace && mvn package -Dmirage2.on=true -Dmirage2.deps.included=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2
        env:
          # Set GEM env variables (for Mirage 2) based on output of 'gem env' in "Install Mirage 2 prerequisites" step
          GEM_HOME: "/home/runner/.gem/ruby/2.4.0"
          GEM_PATH: "/home/runner/.gem/ruby/2.4.0:/opt/hostedtoolcache/Ruby/2.4.10/x64/lib/ruby/gems/2.4.0"

1
.gitignore vendored

@@ -3,6 +3,7 @@ target/
## Ignore project files created by Eclipse
.settings/
/bin/
.project
.classpath

45
.travis.yml

@@ -1,45 +0,0 @@
language: java
env:
# Give Maven 1GB of memory to work with
- MAVEN_OPTS=-Xmx1024M
# Install prerequisites for building Mirage2 more rapidly
before_install:
# Install latest Node.js 0.10.x & print version info
- nvm install 0.10
- node --version
# Install Bower
- npm install -g bower
# Install Grunt & print version info
- npm install -g grunt && npm install -g grunt-cli
- grunt --version
# Print ruby version info (should be installed)
- ruby -v
# Install Sass & print version info
- gem install sass
- sass -v
# Install Compass & print version info
- gem install compass
- compass version
# Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
# Two stage Build and Test
# 1. Install & Unit Test APIs
# 2. Assemble DSpace
script:
# 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
# (This explicitly skips building the 'dspace' assembly module, since we only want to do that ONCE.)
# license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
# -P !dspace => SKIP full DSpace assembly (will do below)
# -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build
- "mvn clean install license:check -Dmaven.test.skip=false -P !dspace -B -V"
# 2. [Assemble DSpace] Ensure assembly process works (from [src]/dspace/), including Mirage 2
# -Dmirage2.on=true => Build Mirage2
# -Dmirage2.deps.included=false => Don't include Mirage2 build dependencies (We installed them in before_install)
# -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
- "cd dspace && mvn package -Dmirage2.on=true -Dmirage2.deps.included=false -P !assembly -B -V"

60
Dockerfile.cli.jdk8 Normal file

@@ -0,0 +1,60 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK8 compatible
# - openjdk:8-jdk
# - ANT 1.10.7
# - maven:3-jdk-8
# - note:
# - default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-5_x
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-5_x-jdk8 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY --chown=dspace dspace/src/main/docker/build.properties /app/build.properties
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -P'!dspace-solr,!dspace-jspui,!dspace-xmlui,!dspace-rest,!dspace-xmlui-mirage2,!dspace-rdf,!dspace-sword,!dspace-swordv2' && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM openjdk:8-jdk as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code
# Step 3 - Run jdk
# Create a new jdk image that does not retain the build directory contents
FROM openjdk:8-jdk
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
ENV JAVA_OPTS=-Xmx1000m
# This script is used to parse environment variables to configs in dspace.cfg
# It is not executed in this Docker file, so it's up to docker-compose to call it.
COPY dspace/src/main/docker/parse_env_to_configs.sh $DSPACE_INSTALL/bin/parse_env_to_configs.sh
# Ensure all scripts are executable
RUN chmod +x $DSPACE_INSTALL/bin/*


@@ -0,0 +1,24 @@
# This image will be published as dspace/dspace-dependencies:dspace-5_x-jdk7
# The purpose of this image is to make the build for dspace/dspace run faster
# Step 1 - Run Maven Build
FROM maven:3-jdk-7 as build
WORKDIR /app
# The Mirage2 build cannot run as root. Setting the user to dspace.
RUN useradd dspace \
&& mkdir /home/dspace /app/target \
&& chown -Rv dspace: /home/dspace /app /app/target
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY --chown=dspace dspace/src/main/docker/build.properties /app/build.properties
# Trigger the installation of all maven dependencies including the Mirage2 dependencies
# Clean up the built artifacts in the same step to keep the docker image small
RUN mvn package -Dmirage2.on=true && cd /app && mvn clean
# Clear the contents of the /app directory so no artifacts are left when dspace:dspace is built
USER root
RUN rm -rf /app/*


@@ -0,0 +1,24 @@
# This image will be published as dspace/dspace-dependencies:dspace-5_x-jdk8
# The purpose of this image is to make the build for dspace/dspace run faster
# Step 1 - Run Maven Build
FROM maven:3-jdk-8 as build
WORKDIR /app
# The Mirage2 build cannot run as root. Setting the user to dspace.
RUN useradd dspace \
&& mkdir /home/dspace /app/target \
&& chown -Rv dspace: /home/dspace /app /app/target
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/build.properties /app/build.properties
# Trigger the installation of all maven dependencies including the Mirage2 dependencies
# Clean up the built artifacts in the same step to keep the docker image small
RUN mvn package -Dmirage2.on=true && mvn clean
# Clear the contents of the /app directory so no artifacts are left when dspace:dspace is built
USER root
RUN rm -rf /app/*

63
Dockerfile.jdk7 Normal file

@@ -0,0 +1,63 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK7 compatible
# - tomcat:7-jre7
# - ANT 1.9.14
# - maven:3-jdk-7
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-5_x-jdk7
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-5_x-jdk7 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY --chown=dspace dspace/src/main/docker/build.properties /app/build.properties
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Dmirage2.on=true && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:7-jre7 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.9.14
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:7-jre7
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/xmlui /usr/local/tomcat/webapps/xmlui && \
ln -s $DSPACE_INSTALL/webapps/jspui /usr/local/tomcat/webapps/jspui && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2

69
Dockerfile.jdk7-test Normal file

@@ -0,0 +1,69 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK7 compatible
# - tomcat:7-jre7
# - ANT 1.9.14
# - maven:3-jdk-7
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-5_x-jdk7
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-5_x-jdk7 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY --chown=dspace dspace/src/main/docker/build.properties /app/build.properties
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Dmirage2.on=true && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:7-jre7 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.9.14
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:7-jre7
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/xmlui /usr/local/tomcat/webapps/xmlui && \
ln -s $DSPACE_INSTALL/webapps/jspui /usr/local/tomcat/webapps/jspui && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2
COPY dspace/src/main/docker/test/solr_web.xml $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml && \
sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml

72
Dockerfile.jdk8 Normal file

@@ -0,0 +1,72 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK8 compatible
# - tomcat:7-jre8
# - ANT 1.10.7
# - maven:3-jdk-8
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-5_x-jdk8
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-5_x-jdk8 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY --chown=dspace dspace/src/main/docker/build.properties /app/build.properties
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Dmirage2.on=true && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:7-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:7-jre8
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/xmlui /usr/local/tomcat/webapps/xmlui && \
ln -s $DSPACE_INSTALL/webapps/jspui /usr/local/tomcat/webapps/jspui && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2
COPY dspace/src/main/docker/parse_env_to_configs.sh $DSPACE_INSTALL/bin/parse_env_to_configs.sh
# Ensure all scripts are executable
RUN chmod +x $DSPACE_INSTALL/bin/*
# On startup run this command to parse environment variables to configs in dspace.cfg
# Then start Tomcat
CMD ["sh", "-c", "$DSPACE_INSTALL/bin/parse_env_to_configs.sh && catalina.sh run"]

78
Dockerfile.jdk8-test Normal file

@@ -0,0 +1,78 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK8 compatible
# - tomcat:7-jre8
# - ANT 1.10.7
# - maven:3-jdk-8
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-5_x-jdk8
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-5_x-jdk8 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY --chown=dspace dspace/src/main/docker/build.properties /app/build.properties
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Dmirage2.on=true && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:7-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:7-jre8
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/xmlui /usr/local/tomcat/webapps/xmlui && \
ln -s $DSPACE_INSTALL/webapps/jspui /usr/local/tomcat/webapps/jspui && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2
COPY dspace/src/main/docker/test/solr_web.xml $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml && \
sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
COPY dspace/src/main/docker/parse_env_to_configs.sh $DSPACE_INSTALL/bin/parse_env_to_configs.sh
# Ensure all scripts are executable
RUN chmod +x $DSPACE_INSTALL/bin/*
# On startup run this command to parse environment variables to configs in dspace.cfg
# Then start Tomcat
CMD ["sh", "-c", "$DSPACE_INSTALL/bin/parse_env_to_configs.sh && catalina.sh run"]

23
LICENSE

@@ -1,7 +1,6 @@
DSpace source code license:
BSD 3-Clause License
Copyright (c) 2002-2013, DuraSpace. All rights reserved.
Copyright (c) 2002-2022, LYRASIS. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
@@ -14,13 +13,12 @@ notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name DuraSpace nor the name of the DSpace Foundation
nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
- Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
@@ -30,11 +28,4 @@ OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
DSpace uses third-party libraries which may be distributed under
different licenses to the above. Information about these licenses
is detailed in the LICENSES_THIRD_PARTY file at the root of the source
tree. You must agree to the terms of these licenses, in addition to
the above DSpace source code license, in order to use this software.
DAMAGE.

LICENSES_THIRD_PARTY

@@ -22,13 +22,18 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net)
* Code Generation Library (cglib:cglib:3.1 - http://cglib.sourceforge.net/)
* AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.10.50 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.10.50 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.10.50 - https://aws.amazon.com/sdkforjava)
* HPPC Collections (com.carrotsearch:hppc:0.5.2 - http://labs.carrotsearch.com/hppc.html/hppc)
* metadata-extractor (com.drewnoakes:metadata-extractor:2.6.2 - http://code.google.com/p/metadata-extractor/)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.3.0 - http://wiki.fasterxml.com/JacksonHome)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.1.3 - http://wiki.fasterxml.com/JacksonHome)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.9.3 - http://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.9.3 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.19.0 - http://code.google.com/p/google-api-java-client/google-api-client/)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.9.3 - http://github.com/FasterXML/jackson)
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.19.1 - http://code.google.com/p/google-api-java-client/google-api-client/)
* Google Analytics API v3-rev103-1.19.0 (com.google.apis:google-api-services-analytics:v3-rev103-1.19.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.0 - http://findbugs.sourceforge.net/)
* Gson (com.google.code.gson:gson:2.2.1 - http://code.google.com/p/google-gson/)
@@ -39,8 +44,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.19.0 - http://code.google.com/p/google-http-java-client/google-http-client/)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.19.0 - http://code.google.com/p/google-http-java-client/google-http-client-jackson2/)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.19.0 - http://code.google.com/p/google-oauth-java-client/google-oauth-client/)
* Java 6 (and higher) extensions to the Google OAuth Client Library for Java. (com.google.oauth-client:google-oauth-client-java6:1.19.0 - http://code.google.com/p/google-oauth-java-client/google-oauth-client-java6/)
* Jetty extensions to the Google OAuth Client Library for Java. (com.google.oauth-client:google-oauth-client-jetty:1.19.0 - http://code.google.com/p/google-oauth-java-client/google-oauth-client-jetty/)
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.2 - http://code.google.com/p/concurrentlinkedhashmap)
* FORESITE :: Object Reuse and Exchange library (com.googlecode.foresite-toolkit:foresite:0.9 - http://www.openarchives.org/ore)
* ISO Parser (com.googlecode.mp4parser:isoparser:1.0-RC-1 - http://code.google.com/p/mp4parser/)
@@ -49,12 +52,13 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Jtwig Core Functions (com.lyncode:jtwig-functions:2.0.1 - http://www.lyncode.com/jtwig-functions)
* Jtwig Spring (com.lyncode:jtwig-spring:2.0.1 - http://www.lyncode.com/jtwig-spring)
* Test Support (com.lyncode:test-support:1.0.3 - http://nexus.sonatype.org/oss-repository-hosting.html/test-support)
* XOAI : OAI-PMH Java Toolkit (com.lyncode:xoai:3.2.9 - http://www.lyncode.com)
* MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/)
* MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.11.0 - http://dev.maxmind.com/geoip/geoip2/web-services)
* Spatial4J (com.spatial4j:spatial4j:0.4.1 - https://github.com/spatial4j/spatial4j)
* Commons BeanUtils (commons-beanutils:commons-beanutils:1.8.3 - http://commons.apache.org/beanutils/)
* Commons CLI (commons-cli:commons-cli:1.2 - http://commons.apache.org/cli/)
* Apache Commons Codec (commons-codec:commons-codec:1.9 - http://commons.apache.org/proper/commons-codec/)
* Collections (commons-collections:commons-collections:3.2 - http://jakarta.apache.org/commons/collections/)
* Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
* Commons Configuration (commons-configuration:commons-configuration:1.6 - http://commons.apache.org/${pom.artifactId.substring(8)}/)
* Commons Configuration (commons-configuration:commons-configuration:1.8 - http://commons.apache.org/configuration/)
* Commons DBCP (commons-dbcp:commons-dbcp:1.4 - http://commons.apache.org/dbcp/)
@@ -119,9 +123,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Excalibur Instrument API (org.apache.excalibur.containerkit:excalibur-instrument-api:2.2.1 - http://www.apache.org/excalibur/excalibur-containerkit/excalibur-instrument-modules/excalibur-instrument-api/)
* Excalibur Logger (org.apache.excalibur.containerkit:excalibur-logger:2.2.1 - http://www.apache.org/excalibur/excalibur-containerkit/excalibur-logger/)
* Activation (org.apache.geronimo.specs:geronimo-activation_1.0.2_spec:1.1 - http://geronimo.apache.org/geronimo-activation_1.0.2_spec)
* Activation 1.1 (org.apache.geronimo.specs:geronimo-activation_1.1_spec:1.0.2 - http://geronimo.apache.org/specs/geronimo-activation_1.1_spec)
* JavaMail 1.4 (org.apache.geronimo.specs:geronimo-javamail_1.4_spec:1.6 - http://geronimo.apache.org/maven/specs/geronimo-javamail_1.4_spec/1.6)
* Streaming API for XML (STAX API 1.0) (org.apache.geronimo.specs:geronimo-stax-api_1.0_spec:1.0 - http://geronimo.apache.org/specs/geronimo-stax-api_1.0_spec)
* Activation 1.1 (org.apache.geronimo.specs:geronimo-activation_1.1_spec:1.1 - http://geronimo.apache.org/maven/specs/geronimo-activation_1.1_spec/1.1)
* JavaMail 1.4 (org.apache.geronimo.specs:geronimo-javamail_1.4_spec:1.7.1 - http://geronimo.apache.org/maven/specs/geronimo-javamail_1.4_spec/1.7.1)
* Streaming API for XML (STAX API 1.0) (org.apache.geronimo.specs:geronimo-stax-api_1.0_spec:1.0.1 - http://geronimo.apache.org/specs/geronimo-stax-api_1.0_spec)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:2.2.0 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:2.2.0 - no url defined)
@@ -158,9 +161,9 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial (org.apache.lucene:lucene-spatial:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-spatial)
* Lucene Suggest (org.apache.lucene:lucene-suggest:4.10.2 - http://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache FontBox (org.apache.pdfbox:fontbox:1.8.7 - http://pdfbox.apache.org/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.7 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:1.8.7 - http://www.apache.org/pdfbox-parent/pdfbox/)
* Apache FontBox (org.apache.pdfbox:fontbox:1.8.12 - http://pdfbox.apache.org/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.12 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:1.8.12 - http://www.apache.org/pdfbox-parent/pdfbox/)
* Apache POI (org.apache.poi:poi:3.6 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml:3.6 - http://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.10.1 - http://poi.apache.org/)
@@ -174,8 +177,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Apache Tika core (org.apache.tika:tika-core:1.5 - http://tika.apache.org/)
* Apache Tika parsers (org.apache.tika:tika-parsers:1.5 - http://tika.apache.org/)
* Apache Tika XMP (org.apache.tika:tika-xmp:1.5 - http://tika.apache.org/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.10 - http://ws.apache.org/axiom/axiom-api/)
* Axiom Impl (org.apache.ws.commons.axiom:axiom-impl:1.2.10 - http://ws.apache.org/axiom/axiom-impl/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.14 - http://ws.apache.org/axiom/)
* Axiom Impl (org.apache.ws.commons.axiom:axiom-impl:1.2.14 - http://ws.apache.org/axiom/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:2.3.0 - http://xmlbeans.apache.org)
* XmlBeans (org.apache.xmlbeans:xmlbeans:2.6.0 - http://xmlbeans.apache.org)
* zookeeper (org.apache.zookeeper:zookeeper:3.4.6 - no url defined)
@@ -188,6 +191,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Data Mapper for Jackson (org.codehaus.jackson:jackson-mapper-asl:1.9.2 - http://jackson.codehaus.org)
* Xml Compatibility extensions for Jackson (org.codehaus.jackson:jackson-xc:1.9.2 - http://jackson.codehaus.org)
* Jettison (org.codehaus.jettison:jettison:1.1 - no url defined)
* Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.1.4 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.0 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.7 - http://woodstox.codehaus.org)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
@@ -196,12 +200,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.1 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.1 - https://github.com/Gagravarr/VorbisJava)
* Javassist (org.javassist:javassist:3.16.1-GA - http://www.javassist.org/)
* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* Jetty Server (org.mortbay.jetty:jetty:6.1.14 - http://jetty.mortbay.org/project/modules/jetty)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.14 - http://jetty.mortbay.org/project/jetty-servlet-tester)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.14 - http://jetty.mortbay.org/project/jetty-util)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
* Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api)
* Noggit (org.noggit:noggit:0.5 - http://noggit.org)
* parboiled-core (org.parboiled:parboiled-core:1.1.6 - http://parboiled.org)
* parboiled-java (org.parboiled:parboiled-java:1.1.6 - http://parboiled.org)
@@ -248,7 +250,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
BSD License:
* ASM Core (asm:asm:3.1 - http://asm.objectweb.org/asm/)
* XMP Library for Java (com.adobe.xmp:xmpcore:5.1.2 - http://www.adobe.com/devnet/xmp.html)
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.0 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
@@ -265,54 +266,70 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Morfologik FSA (org.carrot2:morfologik-fsa:1.7.1 - http://morfologik.blogspot.com/morfologik-fsa/)
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.1 - http://woodstox.codehaus.org/StAX2)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace JSP-UI (org.dspace:dspace-jspui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.0-rc4-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.7 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.10-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.10-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.7 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
* handle (org.dspace:handle:6.2 - no url defined)
* jargon (org.dspace:jargon:1.4.25 - no url defined)
* mets (org.dspace:mets:1.5.2 - no url defined)
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
* XOAI : OAI-PMH Java Toolkit (org.dspace:xoai:3.2.10 - http://nexus.sonatype.org/oss-repository-hosting.html/xoai)
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.10-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
* JAXB2 Basics - Runtime (org.jvnet.jaxb2_commons:jaxb2-basics-runtime:0.9.5 - https://github.com/highsource/jaxb2-basics/jaxb2-basics-runtime)
* ASM Core (org.ow2.asm:asm:4.1 - http://asm.objectweb.org/asm/)
* ASM Core (org.ow2.asm:asm:4.2 - http://asm.objectweb.org/asm/)
* ASM Analysis (org.ow2.asm:asm-analysis:4.1 - http://asm.objectweb.org/asm-analysis/)
* ASM Commons (org.ow2.asm:asm-commons:4.1 - http://asm.objectweb.org/asm-commons/)
* ASM Tree (org.ow2.asm:asm-tree:4.1 - http://asm.objectweb.org/asm-tree/)
* ASM Util (org.ow2.asm:asm-util:4.1 - http://asm.objectweb.org/asm-util/)
* PostgreSQL JDBC Driver (postgresql:postgresql:9.1-901-1.jdbc4 - http://jdbc.postgresql.org)
* XMLUnit for Java (xmlunit:xmlunit:1.1 - http://xmlunit.sourceforge.net/)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
Common Development and Distribution License (CDDL):
* jersey-core (com.sun.jersey:jersey-core:1.17.1 - https://jersey.java.net/jersey-core/)
* jersey-json (com.sun.jersey:jersey-json:1.17.1 - https://jersey.java.net/jersey-json/)
* jersey-server (com.sun.jersey:jersey-server:1.17.1 - https://jersey.java.net/jersey-server/)
* jersey-servlet (com.sun.jersey:jersey-servlet:1.17.1 - https://jersey.java.net/jersey-servlet/)
* jersey-spring (com.sun.jersey.contribs:jersey-spring:1.8 - http://maven.apache.org)
* jersey-core (com.sun.jersey:jersey-core:1.19 - https://jersey.java.net/jersey-core/)
* jersey-json (com.sun.jersey:jersey-json:1.19 - https://jersey.java.net/jersey-json/)
* jersey-server (com.sun.jersey:jersey-server:1.19 - https://jersey.java.net/jersey-server/)
* jersey-servlet (com.sun.jersey:jersey-servlet:1.19 - https://jersey.java.net/jersey-servlet/)
* jersey-spring (com.sun.jersey.contribs:jersey-spring:1.19 - http://maven.apache.org)
* JAXB RI (com.sun.xml.bind:jaxb-impl:2.2.3-1 - http://jaxb.java.net/)
* JAXB Reference Implementation (com.sun.xml.bind:jaxb-impl:2.2.5 - http://jaxb.java.net/)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp)
* javax.annotation API (javax.annotation:javax.annotation-api:1.2 - http://jcp.org/en/jsr/detail?id=250)
* JavaMail API (javax.mail:mail:1.4 - https://glassfish.dev.java.net/javaee5/mail/)
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* jsp-api (javax.servlet:jsp-api:2.0 - no url defined)
* jstl (javax.servlet:jstl:1.1.2 - no url defined)
* servlet-api (javax.servlet:servlet-api:2.5 - no url defined)
* javax.ws.rs-api (javax.ws.rs:javax.ws.rs-api:2.0.1 - http://jax-rs-spec.java.net)
* jsr311-api (javax.ws.rs:jsr311-api:1.1.1 - https://jsr311.dev.java.net)
* JAXB API bundle for GlassFish V3 (javax.xml.bind:jaxb-api:2.2.2 - https://jaxb.dev.java.net/)
* Streaming API for XML (javax.xml.stream:stax-api:1.0-2 - no url defined)
* HK2 API module (org.glassfish.hk2:hk2-api:2.4.0-b31 - https://hk2.java.net/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.4.0-b31 - https://hk2.java.net/hk2-locator)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.4.0-b31 - https://hk2.java.net/hk2-utils)
* OSGi resource locator bundle - used by various API providers that rely on META-INF/services mechanism to locate providers. (org.glassfish.hk2:osgi-resource-locator:1.0.1 - http://glassfish.org/osgi-resource-locator/)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.4.0-b31 - https://hk2.java.net/external/aopalliance-repackaged)
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:javax.inject:2.4.0-b31 - https://hk2.java.net/external/javax.inject)
* jersey-repackaged-guava (org.glassfish.jersey.bundles.repackaged:jersey-guava:2.22.1 - https://jersey.java.net/project/project/jersey-guava/)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.22.1 - https://jersey.java.net/jersey-client/)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.22.1 - https://jersey.java.net/jersey-common/)
* Servlet Specification 2.5 API (org.mortbay.jetty:servlet-api-2.5:6.1.14 - http://jetty.mortbay.org/project/modules/servlet-api-2.5)
* Restlet Core - API and Engine (org.restlet.jee:org.restlet:2.1.1 - http://www.restlet.org/org.restlet)
* Restlet Extension - Servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - http://www.restlet.org/org.restlet.ext.servlet)
@@ -322,8 +339,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* JUnit (junit:junit:4.11 - http://junit.org)
* AspectJ runtime (org.aspectj:aspectjrt:1.6.11 - http://www.aspectj.org)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
* Restlet Core - API and Engine (org.restlet.jee:org.restlet:2.1.1 - http://www.restlet.org/org.restlet)
* Restlet Extension - Servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - http://www.restlet.org/org.restlet.ext.servlet)
@@ -337,7 +352,6 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.3.3 - http://wiki.fasterxml.com/JacksonHome)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.0 - http://findbugs.sourceforge.net/)
* MaxMind GeoIP API (com.maxmind.geoip:geoip-api:1.2.11 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* JAX-RS provider for JSON content type (org.codehaus.jackson:jackson-jaxrs:1.9.2 - http://jackson.codehaus.org)
* Xml Compatibility extensions for Jackson (org.codehaus.jackson:jackson-xc:1.9.2 - http://jackson.codehaus.org)
@@ -345,6 +359,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* Javassist (org.javassist:javassist:3.16.1-GA - http://www.javassist.org/)
* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* org.jdesktop - Swing Worker (org.jdesktop:swing-worker:1.1 - no url defined)
* Restlet Core - API and Engine (org.restlet.jee:org.restlet:2.1.1 - http://www.restlet.org/org.restlet)
* Restlet Extension - Servlet (org.restlet.jee:org.restlet.ext.servlet:2.1.1 - http://www.restlet.org/org.restlet.ext.servlet)
@@ -362,6 +377,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Bouncy Castle CMS and S/MIME API (org.bouncycastle:bcmail-jdk15:1.44 - http://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15:1.44 - http://www.bouncycastle.org/java.html)
* ORCID Java API generated via JAXB (org.dspace:orcid-jaxb-api:2.1.0 - https://github.com/DSpace/orcid-jaxb-api)
* Main (org.jmockit:jmockit:1.10 - http://www.jmockit.org)
* OpenCloud (org.mcavallo:opencloud:0.3 - http://opencloud.mcavallo.org/)
* Mockito (org.mockito:mockito-all:1.9.5 - http://www.mockito.org)
@@ -378,6 +394,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/)
* H2 Database Engine (com.h2database:h2:1.4.180 - http://www.h2database.com)
* Javassist (org.javassist:javassist:3.16.1-GA - http://www.javassist.org/)
* Javassist (org.javassist:javassist:3.18.1-GA - http://www.javassist.org/)
* Rhino (rhino:js:1.6R7 - http://www.mozilla.org/rhino/)
Public Domain:
@@ -387,7 +404,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
* XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)
Unknown license:
The JSON License:
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
* JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java)
The PostgreSQL License:
* PostgreSQL JDBC Driver - JDBC 4.1 (org.postgresql:postgresql:42.2.1.jre7 - https://github.com/pgjdbc/pgjdbc-parent-poms/pgjdbc-core-parent/pgjdbc-core-prevjre/postgresql)

NOTICE
View File

@@ -1,15 +1,28 @@
Licenses of Third-Party Libraries
=================================
Licensing Notice
DSpace uses third-party libraries which may be distributed under
different licenses than specified in our LICENSE file. Information
about these licenses is detailed in the LICENSES_THIRD_PARTY file at
the root of the source tree. You must agree to the terms of these
licenses, in addition to the DSpace source code license, in order to
use this software.
Fedora Commons joined with the DSpace Foundation and began operating under
Licensing Notices
=================
[July 2019] DuraSpace joined with LYRASIS (another 501(c)3 organization) in July 2019.
LYRASIS holds the copyrights of DuraSpace.
[July 2009] Fedora Commons joined with the DSpace Foundation and began operating under
the new name DuraSpace in July 2009. DuraSpace holds the copyrights of
the DSpace Foundation, Inc.
The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the dspace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright, in these instances please note that the copy
[July 2007] The DSpace Foundation, Inc. is a 501(c)3 corporation established in July 2007
with a mission to promote and advance the dspace platform enabling management,
access and preservation of digital works. The Foundation was able to transfer
the legal copyright from Hewlett-Packard Company (HP) and Massachusetts
Institute of Technology (MIT) to the DSpace Foundation in October 2007. Many
of the files in the source code may contain a copyright statement stating HP
and MIT possess the copyright, in these instances please note that the copy
right has transferred to the DSpace foundation, and subsequently to DuraSpace.

README
View File

@@ -1,18 +1,16 @@
DSpace version information can be viewed online at
- https://wiki.duraspace.org/display/DSDOC/
- https://wiki.lyrasis.org/display/DSDOC/
Documentation for the most recent stable release(s) may be downloaded
or viewed online at
- http://www.dspace.org/latest-release/
- https://wiki.duraspace.org/display/DSDOC/
- http://sourceforge.net/projects/dspace/files/DSpace%20Stable/
(select a version and read DSpace-Manual.pdf)
- https://wiki.lyrasis.org/display/DSDOC/
Installation instructions are to be found in that documentation.
In addition, a listing of all known contributors to DSpace software can be
found online at:
https://wiki.duraspace.org/display/DSPACE/DSpaceContributors
https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors
Installation instructions for other versions may be different, so you
are encouraged to obtain the appropriate version of the Documentation
@@ -26,20 +24,20 @@ or just:
- git clone git://github.com/DSpace/DSpace.git
Please refer any further problems to the dspace-tech@lists.sourceforge.net
Please refer any further problems to the dspace-tech@googlegroups.com
mailing list.
- http://sourceforge.net/mail/?group_id=19984
- https://groups.google.com/d/forum/dspace-tech
Detailed Issue Tracking for DSpace is done on our JIRA Issue Tracker
Detailed Issue Tracking for DSpace is done in GitHub issues
- https://jira.duraspace.org/browse/DS
- https://github.com/DSpace/DSpace/issues
To contribute to DSpace, please see:
- https://wiki.duraspace.org/display/DSPACE/How+to+Contribute+to+DSpace
- https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace
For more details about DSpace, including a list of service providers,
@@ -47,8 +45,5 @@ places to seek help, news articles and lists of other users, please see:
- http://www.dspace.org/
DSpace source code licensing information available online at:
- http://www.dspace.org/license/
Copyright (c) 2002-2015, DuraSpace. All rights reserved.
DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).
The full license is available at http://www.dspace.org/license/

View File

@@ -66,14 +66,12 @@ db.password=dspace
#db.username=dspace
#db.password=dspace
# Schema name - if your database contains multiple schemas, you can avoid problems with
# retrieving the definitions of duplicate object names by specifying
# the schema name here that is used for DSpace by uncommenting the following entry
# NOTE: this configuration option is for PostgreSQL only. For Oracle, schema is equivalent
# to user name. DSpace depends on the PostgreSQL understanding of schema. If you are using
# Oracle, just leave this value blank.
# Schema name - if your database contains multiple schemas, you can avoid
# problems with retrieving the definitions of duplicate object names by
# specifying the schema name that is used for DSpace.
# ORACLE USAGE NOTE: In Oracle, schema is equivalent to "username". This means
# specifying a "db.schema" is often unnecessary (i.e. you can leave it blank),
# UNLESS your Oracle DB Account (in db.username) has access to multiple schemas.
db.schema =
# Maximum number of DB connections in pool

docker-compose-cli.yml Normal file
View File

@@ -0,0 +1,43 @@
#
# The contents of this file are subject to the license and copyright
# detailed in the LICENSE and NOTICE files at the root of the source
# tree and available online at
#
# http://www.dspace.org/license/
#
version: "3.7"
services:
dspace-cli:
image: "dspace/dspace-cli:${DSPACE_VER:-dspace-5_x}"
container_name: dspace-cli
build:
context: .
dockerfile: Dockerfile.cli.jdk8
environment:
# Env vars with double underbars in names will be replaced with periods and written to dspace.cfg
# The default values for dspace.cfg will be provided here
# __D__ -> -
# __P__ -> .
- dspace__P__dir=/dspace
- db__P__url=jdbc:postgresql://dspacedb:5432/dspace
- dspace__P__hostname=localhost
- dspace__P__baseUrl=http://localhost:8080
- dspace__P__name=DSpace Started with Docker Compose
- solr__P__server=http://dspace:8080/solr
volumes:
- assetstore:/dspace/assetstore
entrypoint: /dspace/bin/parse_env_to_configs.sh
# Any commands passed here will be forwarded to /dspace/bin/dspace by parse_env_to_configs.sh (see its code)
command: help
networks:
- dspacenet
tty: true
stdin_open: true
volumes:
assetstore:
networks:
dspacenet:
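A note on the environment variable convention used in the compose file above: the entrypoint script /dspace/bin/parse_env_to_configs.sh rewrites variable names into dspace.cfg keys, replacing "__P__" with "." and "__D__" with "-". The following is a minimal illustrative sketch in Java of that substitution only, not the actual shell script; the class and method names are invented for the example.

import java.util.Map;
import java.util.TreeMap;

public class EnvToConfigSketch {
    // Convert an environment variable name to a dspace.cfg key:
    // "__P__" becomes "." and "__D__" becomes "-".
    static String toConfigKey(String envName) {
        return envName.replace("__P__", ".").replace("__D__", "-");
    }

    public static void main(String[] args) {
        Map<String, String> env = new TreeMap<>();
        env.put("dspace__P__baseUrl", "http://localhost:8080");
        env.put("db__P__url", "jdbc:postgresql://dspacedb:5432/dspace");
        for (Map.Entry<String, String> e : env.entrySet()) {
            // e.g. dspace__P__baseUrl -> dspace.baseUrl
            System.out.println(toConfigKey(e.getKey()) + " = " + e.getValue());
        }
    }
}

Run against the variables defined above, this would print keys such as db.url and dspace.baseUrl, which is the form the container writes into dspace.cfg.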

docker-compose.yml Normal file
View File

@@ -0,0 +1,64 @@
#
# The contents of this file are subject to the license and copyright
# detailed in the LICENSE and NOTICE files at the root of the source
# tree and available online at
#
# http://www.dspace.org/license/
#
version: '3.7'
networks:
dspacenet:
services:
dspace:
container_name: dspace
depends_on:
- dspacedb
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-5_x-jdk8-test}"
environment:
# Env vars with double underbars in names will be replaced with periods and written to dspace.cfg
# The default values for dspace.cfg will be provided here
# __D__ -> -
# __P__ -> .
- dspace__P__dir=/dspace
- db__P__url=jdbc:postgresql://dspacedb:5432/dspace
- dspace__P__hostname=localhost
- dspace__P__baseUrl=http://localhost:8080
- dspace__P__name=DSpace Started with Docker Compose
- solr__P__server=http://localhost:8080/solr
build:
context: .
dockerfile: Dockerfile.jdk8-test
networks:
dspacenet:
ports:
- published: 8080
target: 8080
stdin_open: true
tty: true
volumes:
- ./dspace/src/main/docker-compose/xmlui.xconf:/dspace/config/xmlui.xconf
- ./dspace/src/main/docker-compose/parse_configs.sh:/dspace/bin/parse_configs.sh
- assetstore:/dspace/assetstore
- solr_authority:/dspace/solr/authority/data
- solr_oai:/dspace/solr/oai/data
- solr_search:/dspace/solr/search/data
- solr_statistics:/dspace/solr/statistics/data
dspacedb:
container_name: dspacedb
environment:
PGDATA: /pgdata
image: dspace/dspace-postgres-pgcrypto
networks:
dspacenet:
stdin_open: true
tty: true
volumes:
- pgdata:/pgdata
volumes:
assetstore:
pgdata:
solr_authority:
solr_oai:
solr_search:
solr_statistics:

View File

@@ -12,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>5.0</version>
<version>5.11</version>
<relativePath>..</relativePath>
</parent>
@@ -209,7 +209,7 @@
</execution>
</executions>
</plugin>
<!-- FileWeaver plugin is in charge of initializing & "weaving" together
the dspace.cfg file to be used by the Unit Testing environment.
It weaves two files, the default 'dspace.cfg' and 'dspace.cfg.more',
@@ -528,9 +528,9 @@
<version>1.8</version>
</dependency>
<dependency>
<groupId>com.maxmind.geoip</groupId>
<artifactId>geoip-api</artifactId>
<version>1.2.11</version>
<groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId>
<version>2.11.0</version>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
@@ -569,10 +569,10 @@
<dependency>
<groupId>postgresql</groupId>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
@@ -590,7 +590,7 @@
<artifactId>im4java</artifactId>
<version>1.4.0</version>
</dependency>
<!-- Flyway DB API (flywaydb.org) is used to manage DB upgrades automatically. -->
<dependency>
<groupId>org.flywaydb</groupId>
@@ -603,13 +603,21 @@
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-analytics</artifactId>
</dependency>
<dependency>
<groupId>com.google.api-client</groupId>
<artifactId>google-api-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client-jackson2</artifactId>
</dependency>
<dependency>
<groupId>com.google.oauth-client</groupId>
<artifactId>google-oauth-client-jetty</artifactId>
<artifactId>google-oauth-client</artifactId>
</dependency>
<!-- FindBugs -->
<dependency>
@@ -631,6 +639,66 @@
<version>1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-impl</artifactId>
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
<version>1.2.14</version>
</dependency>
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-api</artifactId>
<!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
<version>1.2.14</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.22.1</version>
</dependency>
<!-- S3 -->
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.10.50</version>
<exclusions>
<exclusion>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- S3 also wanted jackson... -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
<!-- For ORCID v2 integration -->
<dependency>
<groupId>org.dspace</groupId>
<artifactId>orcid-jaxb-api</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20180130</version>
</dependency>
</dependencies>
</project>

View File

@@ -113,8 +113,8 @@ public class CommunityFiliator
CommunityFiliator filiator = new CommunityFiliator();
Context c = new Context();
// ve are superuser!
c.setIgnoreAuthorization(true);
// we are superuser!
c.turnOffAuthorisationSystem();
try
{

View File

@@ -228,7 +228,7 @@ public final class CreateAdministrator
{
// Of course we aren't an administrator yet so we need to
// circumvent authorisation
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
// Find administrator group
Group admins = Group.find(context, 1);

View File

@@ -88,7 +88,7 @@ public class MetadataExporter
{
// create a context
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
xmlFormat.setLineWidth(120);

View File

@@ -187,7 +187,7 @@ public class DSpaceCSV implements Serializable
StringBuilder lineBuilder = new StringBuilder();
String lineRead;
while (StringUtils.isNotBlank(lineRead = input.readLine()))
while ((lineRead = input.readLine()) != null)
{
if (lineBuilder.length() > 0) {
// Already have a previously read value - add this line

View File

@@ -1169,10 +1169,8 @@ public class MetadataImport
*/
private static boolean isAuthorityControlledField(String md)
{
int pos = md.indexOf("[");
String mdf = (pos > -1 ? md.substring(0, pos) : md);
pos = md.indexOf(":");
mdf = (pos > -1 ? md.substring(pos+1) : md);
String mdf = StringUtils.substringAfter(md, ":");
mdf = StringUtils.substringBefore(mdf, "[");
return authorityControlled.contains(mdf);
}
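For clarity, the refactored helper above strips everything up to the first ":" and anything from "[" onward before checking the authorityControlled set. A small standalone illustration, assuming a heading of the hypothetical form "schema:element.qualifier[language]" (the exact CSV heading format is not shown in this hunk):

import org.apache.commons.lang.StringUtils;

public class HeadingParseSketch {
    public static void main(String[] args) {
        String md = "dc:contributor.author[en_US]";   // hypothetical CSV heading
        // Same two steps as isAuthorityControlledField():
        String mdf = StringUtils.substringAfter(md, ":");  // "contributor.author[en_US]"
        mdf = StringUtils.substringBefore(mdf, "[");       // "contributor.author"
        System.out.println(mdf);
    }
}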

View File

@@ -89,16 +89,16 @@ public class MetadataImportInvalidHeadingException extends Exception
{
if (type == SCHEMA)
{
return "Unknown metadata schema in row " + column + ": " + badHeading;
return "Unknown metadata schema in column " + column + ": " + badHeading;
} else if (type == ELEMENT)
{
return "Unknown metadata element in row " + column + ": " + badHeading;
return "Unknown metadata element in column " + column + ": " + badHeading;
} else if (type == MISSING)
{
return "Row with missing header: Row " + column;
return "Row with missing header: column " + column;
} else
{
return "Bad metadata declaration in row " + column + ": " + badHeading;
return "Bad metadata declaration in column" + column + ": " + badHeading;
}
}
}
}

View File

@@ -209,7 +209,7 @@ public class ItemExport
}
Context c = new Context();
c.setIgnoreAuthorization(true);
c.turnOffAuthorisationSystem();
if (myType == Constants.ITEM)
{

View File

@@ -284,7 +284,7 @@ public class ItemImport
if (line.hasOption('z'))
{
zip = true;
zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
zipfilename = line.getOptionValue('z');
}
//By default assume collections will be given on the command line
@@ -733,7 +733,7 @@ public class ItemImport
{
clist = mycollections;
}
addItem(c, mycollections, sourceDir, dircontents[i], mapOut, template);
addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
System.out.println(i + " " + dircontents[i]);
c.clearCache();
}
@@ -1108,6 +1108,10 @@ public class ItemImport
{
value = "";
}
else
{
value = value.trim();
}
// //getElementData(n, "element");
String element = getAttributeValue(n, "element");
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
@@ -1129,8 +1133,8 @@ public class ItemImport
{
qualifier = null;
}
if (!isTest)
// only add metadata if it is not a test and there is a real value
if (!isTest && !value.equals(""))
{
i.addMetadata(schema, element, qualifier, language, value);
}
@@ -1999,12 +2003,14 @@ public class ItemImport
if (destinationDir == null){
destinationDir = tempWorkDir;
}
log.debug("Using directory " + destinationDir + " for zip extraction. (destDir arg is " + destDir +
", tempWorkDir is " + tempWorkDir + ")");
File tempdir = new File(destinationDir);
if (!tempdir.isDirectory())
{
log.error("'" + ConfigurationManager.getProperty("org.dspace.app.itemexport.work.dir") +
"' as defined by the key 'org.dspace.app.itemexport.work.dir' in dspace.cfg " +
log.error("'" + ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir") +
"' as defined by the key 'org.dspace.app.batchitemimport.work.dir' in dspace.cfg " +
"is not a valid directory");
}
@@ -2012,9 +2018,15 @@ public class ItemImport
{
log.error("Unable to create temporary directory: " + tempdir.getAbsolutePath());
}
String sourcedir = destinationDir + System.getProperty("file.separator") + zipfile.getName();
String zipDir = destinationDir + System.getProperty("file.separator") + zipfile.getName() + System.getProperty("file.separator");
if(!destinationDir.endsWith(System.getProperty("file.separator"))) {
destinationDir += System.getProperty("file.separator");
}
String sourcedir = destinationDir + zipfile.getName();
String zipDir = destinationDir + zipfile.getName() + System.getProperty("file.separator");
log.debug("zip directory to use is " + zipDir);
// 3
String sourceDirForZip = sourcedir;
@@ -2024,11 +2036,26 @@ public class ItemImport
while (entries.hasMoreElements())
{
entry = entries.nextElement();
// Check that the true path to extract files is never outside allowed temp directories
// without creating any actual files on disk
log.debug("Inspecting entry name: " + entry.getName() + " for path traversal security");
File potentialExtract = new File(zipDir + entry.getName());
String canonicalPath = potentialExtract.getCanonicalPath();
log.debug("Canonical path to potential File is " + canonicalPath);
if(!canonicalPath.startsWith(zipDir)) {
log.error("Rejecting zip file: " + zipfile.getName() + " as it contains an entry that would be extracted " +
"outside the temporary unzip directory: " + canonicalPath);
throw new IOException("Error extracting " + zipfile + ": Canonical path of zip entry: " +
entry.getName() + " (" + canonicalPath + ") does not start with permissible temp " +
"unzip directory (" + destinationDir + ")");
}
if (entry.isDirectory())
{
if (!new File(zipDir + entry.getName()).mkdir())
{
// Log error and throw IOException if a directory entry could not be created
File newDir = new File(zipDir + entry.getName());
if (!newDir.mkdirs()) {
log.error("Unable to create contents directory: " + zipDir + entry.getName());
throw new IOException("Unable to create contents directory: " + zipDir + entry.getName());
}
}
else
@@ -2070,6 +2097,7 @@ public class ItemImport
byte[] buffer = new byte[1024];
int len;
InputStream in = zf.getInputStream(entry);
log.debug("Reading " + zipDir + entry.getName() + " into InputStream");
BufferedOutputStream out = new BufferedOutputStream(
new FileOutputStream(zipDir + entry.getName()));
while((len = in.read(buffer)) >= 0)
@@ -2148,7 +2176,7 @@ public class ItemImport
context = new Context();
eperson = EPerson.find(context, oldEPerson.getID());
context.setCurrentUser(eperson);
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
boolean isResume = theResumeDir!=null;
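The path-traversal guard added in the unzip hunk above boils down to one check: the canonical path of every zip entry must stay inside the extraction directory. A minimal standalone sketch of that check follows, with invented class and method names (it is not the DSpace ItemImport API):

import java.io.File;
import java.io.IOException;

public class ZipEntryPathCheck {
    // Reject any zip entry whose resolved path would escape the extraction directory.
    static File safeDestination(File extractDir, String entryName) throws IOException {
        File candidate = new File(extractDir, entryName);
        String canonicalDir = extractDir.getCanonicalPath() + File.separator;
        String canonicalEntry = candidate.getCanonicalPath();
        if (!canonicalEntry.startsWith(canonicalDir)) {
            throw new IOException("Zip entry " + entryName + " resolves outside " + canonicalDir);
        }
        return candidate;
    }

    public static void main(String[] args) throws IOException {
        File dir = new File(System.getProperty("java.io.tmpdir"), "unzip-test");
        System.out.println(safeDestination(dir, "item_000/contents")); // accepted
        System.out.println(safeDestination(dir, "../../etc/passwd"));  // throws IOException
    }
}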

View File

@@ -351,7 +351,7 @@ public class ItemUpdate {
context = new Context();
iu.setEPerson(context, iu.eperson);
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
@@ -362,19 +362,20 @@ public class ItemUpdate {
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
context.complete(); // complete all transactions
context.setIgnoreAuthorization(false);
}
catch (Exception e)
{
if (context != null && context.isValid())
{
context.abort();
context.setIgnoreAuthorization(false);
}
e.printStackTrace();
pr(e.toString());
status = 1;
}
finally {
context.restoreAuthSystemState();
}
if (isTest)
{
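Across this and the other command-line tools in this changeset, setIgnoreAuthorization(true/false) is replaced by the paired calls turnOffAuthorisationSystem() and restoreAuthSystemState(), with the restore moved into a finally block so it also runs when the tool aborts. A condensed sketch of that pattern, assuming the DSpace API (org.dspace.core.Context) is on the classpath; the method runAsSystem() and its body are hypothetical:

import org.dspace.core.Context;

public class AuthStateSketch {
    static void runAsSystem() throws Exception {
        Context context = new Context();
        context.turnOffAuthorisationSystem();
        try {
            // ... privileged batch work, e.g. processing an archive ...
            context.complete();
        } catch (Exception e) {
            if (context.isValid()) {
                context.abort();
            }
            throw e;
        } finally {
            // Always undo the temporary bypass, even after an abort.
            context.restoreAuthSystemState();
        }
    }
}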

View File

@@ -11,6 +11,7 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.TreeMap;
import org.dspace.core.ConfigurationManager;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
@@ -275,9 +276,21 @@ public class ScriptLauncher
*/
private static void display()
{
// List all command elements
List<Element> commands = commandConfigs.getRootElement().getChildren("command");
System.out.println("Usage: dspace [command-name] {parameters}");
// Sort the commands by name.
// We cannot just use commands.sort() because it tries to remove and
// reinsert Elements within other Elements, and that doesn't work.
TreeMap<String, Element> sortedCommands = new TreeMap<>();
for (Element command : commands)
{
sortedCommands.put(command.getChild("name").getValue(), command);
}
// Display the sorted list
System.out.println("Usage: dspace [command-name] {parameters}");
for (Element command : sortedCommands.values())
{
System.out.println(" - " + command.getChild("name").getValue() +
": " + command.getChild("description").getValue());

View File

@@ -7,9 +7,10 @@
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
/**
@@ -30,9 +31,24 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
throws Exception
{
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
File f2 = getThumbnailFile(f);
return new FileInputStream(f2);
}
File f2 = null;
try
{
f2 = getThumbnailFile(f);
byte[] bytes = Files.readAllBytes(f2.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
}
}
}

View File

@@ -7,24 +7,40 @@
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
public InputStream getDestinationStream(InputStream source)
throws Exception
{
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
File f2 = getImageFile(f, 0);
File f3 = getThumbnailFile(f2);
return new FileInputStream(f3);
File f2 = null;
File f3 = null;
try
{
f2 = getImageFile(f, 0);
f3 = getThumbnailFile(f2);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
if (f3 != null)
{
//noinspection ResultOfMethodCallIgnored
f3.delete();
}
}
}
public static final String[] PDF = {"Adobe PDF"};
public String[] getInputMIMETypes()
{
return PDF;
}
}

View File

@@ -23,6 +23,7 @@ import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.im4java.core.ConvertCmd;
import org.im4java.core.Info;
import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation;
import org.im4java.process.ProcessStarter;
@@ -34,161 +35,163 @@ import org.dspace.core.ConfigurationManager;
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
* no bigger than. Creates only JPEGs.
*/
public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
{
private static int width = 180;
private static int height = 120;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch(PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
}
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
private static int width = 180;
private static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
static String cmyk_profile;
static String srgb_profile;
/**
* @return String bundle name
*
*/
public String getBundleName()
{
return "THUMBNAIL";
}
static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
/**
* @return String bitstreamformat
*/
public String getFormatString()
{
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
public String getDescription()
{
return bitstreamDescription;
}
public static File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public static File getThumbnailFile(File f) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
System.out.println("IM Thumbnail Param: "+op);
}
cmd.run(op);
return f2;
}
public static File getImageFile(File f, int page) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath()+s);
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
System.out.println("IM Image Param: "+op);
public ImageMagickThumbnailFilter() {
}
cmd.run(op);
return f2;
}
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
throws Exception
{
String nsrc = source.getName();
for(Bundle b: item.getBundles("THUMBNAIL")) {
for(Bitstream bit: b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc)) continue;
}
}
String description = bit.getDescription();
//If anything other than a generated thumbnail is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (MediaFilterManager.isVerbose) {
System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (MediaFilterManager.isVerbose) {
System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item " + item.getHandle() + ". Thumbnail will not be generated. ");
return false;
}
}
return true; //assume that the thumbnail is a custom one
}
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
}
public String[] getInputDescriptions()
{
return null;
}
/**
* @return String bundle name
*
*/
public String getBundleName() {
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
public String getFormatString() {
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
public String getDescription() {
return bitstreamDescription;
}
public static File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public static File getThumbnailFile(File f) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.autoOrient();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
System.out.println("IM Thumbnail Param: " + op);
}
cmd.run(op);
return f2;
}
public static File getImageFile(File f, int page) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (flatten) {
op.flatten();
}
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath() + s, true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);
op.profile(srgb_profile);
}
}
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
System.out.println("IM Image Param: " + op);
}
cmd.run(op);
return f2;
}
public boolean preProcessBitstream(Context c, Item item, Bitstream source) throws Exception {
String nsrc = source.getName();
for (Bundle b : item.getBundles("THUMBNAIL")) {
for (Bitstream bit : b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc))
continue;
}
}
String description = bit.getDescription();
// If anything other than a generated thumbnail
// is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (MediaFilterManager.isVerbose) {
System.out.println(description + " " + nsrc
+ " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (MediaFilterManager.isVerbose) {
System.out.println(bitstreamDescription + " " + nsrc
+ " is replacable.");
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
+ item.getHandle() + ". Thumbnail will not be generated. ");
return false;
}
}
return true; // assume that the thumbnail is a custom one
}
public String[] getInputExtensions()
{
return ImageIO.getReaderFileSuffixes();
}
}

View File

@@ -33,7 +33,7 @@ import java.util.zip.GZIPOutputStream;
* }
* g.finish();
* </pre>
*
*
* @author Robert Tansley
*/
public abstract class AbstractGenerator
@@ -59,7 +59,7 @@ public abstract class AbstractGenerator
/**
* Initialize this generator to write to the given directory. This must be
* called by any subclass constructor.
*
*
* @param outputDirIn
* directory to write sitemap files to
*/
@@ -73,7 +73,7 @@ public abstract class AbstractGenerator
/**
* Start writing a new sitemap file.
*
*
* @throws IOException
* if an error occurs creating the file
*/
@@ -97,7 +97,7 @@ public abstract class AbstractGenerator
/**
* Add the given URL to the sitemap.
*
*
* @param url
* Full URL to add
* @param lastMod
@@ -129,7 +129,7 @@ public abstract class AbstractGenerator
/**
* Finish with the current sitemap file.
*
*
* @throws IOException
* if an error occurs writing
*/
@@ -144,15 +144,18 @@ public abstract class AbstractGenerator
* Complete writing sitemap files and write the index files. This is invoked
* when all calls to {@link AbstractGenerator#addURL(String, Date)} have
* been completed, and invalidates the generator.
*
*
* @return number of sitemap files written.
*
*
* @throws IOException
* if an error occurs writing
*/
public int finish() throws IOException
{
closeCurrentFile();
if (null != currentOutput)
{
closeCurrentFile();
}
OutputStream fo = new FileOutputStream(new File(outputDir,
getIndexFilename()));
@@ -165,13 +168,13 @@ public abstract class AbstractGenerator
PrintStream out = new PrintStream(fo);
writeIndex(out, fileCount);
out.close();
return fileCount;
}
/**
* Return marked-up text to be included in a sitemap about a given URL.
*
*
* @param url
* URL to add information about
* @param lastMod
@@ -183,14 +186,14 @@ public abstract class AbstractGenerator
/**
* Return the boilerplate at the top of a sitemap file.
*
*
* @return The boilerplate markup.
*/
public abstract String getLeadingBoilerPlate();
/**
* Return the boilerplate at the end of a sitemap file.
*
*
* @return The boilerplate markup.
*/
public abstract String getTrailingBoilerPlate();
@@ -198,7 +201,7 @@ public abstract class AbstractGenerator
/**
* Return the maximum size in bytes that an individual sitemap file should
* be.
*
*
* @return the size in bytes.
*/
public abstract int getMaxSize();
@@ -206,7 +209,7 @@ public abstract class AbstractGenerator
/**
* Return the maximum number of URLs that an individual sitemap file should
* contain.
*
*
* @return the maximum number of URLs.
*/
public abstract int getMaxURLs();
@@ -214,7 +217,7 @@ public abstract class AbstractGenerator
/**
* Return whether the written sitemap files and index should be
* GZIP-compressed.
*
*
* @return {@code true} if GZIP compression should be used, {@code false}
* otherwise.
*/
@@ -222,7 +225,7 @@ public abstract class AbstractGenerator
/**
* Return the filename a sitemap at the given index should be stored at.
*
*
* @param number
* index of the sitemap file (zero is first).
* @return the filename to write the sitemap to.
@@ -231,14 +234,14 @@ public abstract class AbstractGenerator
/**
* Get the filename the index should be written to.
*
*
* @return the filename of the index.
*/
public abstract String getIndexFilename();
/**
* Write the index file.
*
*
* @param output
* stream to write the index to
* @param sitemapCount

View File

@@ -92,7 +92,7 @@ public class CreateStatReport {
// create context as super user
context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
//get paths to directories
outputLogDirectory = ConfigurationManager.getProperty("log.dir") + File.separator;

View File

@@ -215,7 +215,7 @@ public class LogAnalyser
// create context as super user
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
// set up our command line variables
String myLogDir = null;

View File

@@ -151,7 +151,7 @@ public class ReportGenerator
{
// create context as super user
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
String myFormat = null;
String myInput = null;

View File

@@ -7,37 +7,31 @@
*/
package org.dspace.app.util;
import java.sql.SQLException;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.*;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.Metadatum;
import org.dspace.core.ConfigurationManager;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager;
import org.jdom.Element;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.SQLException;
import java.util.*;
import java.util.Map.Entry;
/**
* Configuration and mapping for Google Scholar output metadata
* @author Sands Fish
@@ -125,6 +119,7 @@ public class GoogleMetadata
private static final int ALL_FIELDS_IN_OPTION = 2;
private Context ourContext;
// Load configured fields from google-metadata.properties
static
{
@@ -216,7 +211,11 @@ public class GoogleMetadata
// Hold onto the item in case we need to refresh a stale parse
this.item = item;
itemURL = HandleManager.resolveToURL(context, item.getHandle());
ourContext=context;
EPerson currentUser = ourContext.getCurrentUser();
ourContext.setCurrentUser(null);
parseItem();
ourContext.setCurrentUser(currentUser);
}
/**
@@ -757,16 +756,17 @@ public class GoogleMetadata
}
/**
* Fetch all metadata mappings
*
* Fetch retaining the order of the values for any given key in which they
* were added (like authors).
*
* Usage: GoogleMetadata gmd = new GoogleMetadata(item); for(Entry<String,
* String> mapping : googlemd.getMappings()) { ... }
*
* @return Iterable of metadata fields mapped to Google-formatted values
*/
public Set<Entry<String, String>> getMappings()
public Collection<Entry<String, String>> getMappings()
{
return new HashSet<>(metadataMappings.entries());
return metadataMappings.entries();
}
/**
@@ -1041,7 +1041,6 @@ public class GoogleMetadata
*/
private Bitstream findLinkableFulltext(Item item) throws SQLException {
Bitstream bestSoFar = null;
int bitstreamCount = 0;
Bundle[] contentBundles = item.getBundles("ORIGINAL");
for (Bundle bundle : contentBundles) {
int primaryBitstreamId = bundle.getPrimaryBitstreamID();
@@ -1050,16 +1049,16 @@ public class GoogleMetadata
if (candidate.getID() == primaryBitstreamId) { // is primary -> use this one
if (isPublic(candidate)) {
return candidate;
}
} else
{
if (bestSoFar == null && isPublic(candidate)) { //if bestSoFar is null but the candidate is not public you don't use it and try to find another
bestSoFar = candidate;
}
}
} else if (bestSoFar == null) {
bestSoFar = candidate;
}
bitstreamCount++;
}
}
if (bitstreamCount > 1 || !isPublic(bestSoFar)) {
bestSoFar = null;
}
return bestSoFar;
}
@@ -1069,16 +1068,10 @@ public class GoogleMetadata
return false;
}
boolean result = false;
Context context = null;
try {
context = new Context();
result = AuthorizeManager.authorizeActionBoolean(context, bitstream, Constants.READ, true);
result = AuthorizeManager.authorizeActionBoolean(ourContext, bitstream, Constants.READ, true);
} catch (SQLException e) {
log.error("Cannot determine whether bitstream is public, assuming it isn't. bitstream_id=" + bitstream.getID(), e);
} finally {
if (context != null) {
context.abort();
}
}
return result;
}

View File

@@ -279,11 +279,11 @@ public class LDAPAuthentication
{
log.info(LogManager.getHeader(context,
"type=ldap-login", "type=ldap_but_already_email"));
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
eperson.setNetid(netid.toLowerCase());
eperson.update();
context.commit();
context.setIgnoreAuthorization(false);
context.restoreAuthSystemState();
context.setCurrentUser(eperson);
// assign user to groups based on ldap dn
@@ -298,7 +298,7 @@ public class LDAPAuthentication
// TEMPORARILY turn off authorisation
try
{
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
eperson = EPerson.create(context);
if (StringUtils.isNotEmpty(email))
{
@@ -332,7 +332,7 @@ public class LDAPAuthentication
}
finally
{
context.setIgnoreAuthorization(false);
context.restoreAuthSystemState();
}
log.info(LogManager.getHeader(context, "authenticate",
@@ -354,7 +354,7 @@ public class LDAPAuthentication
}
finally
{
context.setIgnoreAuthorization(false);
context.restoreAuthSystemState();
}
}
}

View File

@@ -7,11 +7,7 @@
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
@@ -20,6 +16,10 @@ import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.sql.SQLException;
/**
* A stackable authentication method
* based on the DSpace internal "EPerson" database.
@@ -128,7 +128,7 @@ public class PasswordAuthentication
// ensures they are password users
try
{
if (context.getCurrentUser().getPasswordHash() != null && !context.getCurrentUser().getPasswordHash().toString().equals(""))
if (context.getCurrentUser() != null && context.getCurrentUser().getPasswordHash()!=null && StringUtils.isNotBlank(context.getCurrentUser().getPasswordHash().toString()))
{
String groupName = ConfigurationManager.getProperty("authentication-password", "login.specialgroup");
if ((groupName != null) && (!groupName.trim().equals("")))
@@ -149,7 +149,7 @@ public class PasswordAuthentication
}
}
catch (Exception e) {
// The user is not a password user, so we don't need to worry about them
log.error(LogManager.getHeader(context,"getSpecialGroups",""),e);
}
return new int[0];
}

View File

@@ -612,7 +612,7 @@ public class X509Authentication implements AuthenticationMethod
"from=x.509, email=" + email));
// TEMPORARILY turn off authorisation
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
eperson = EPerson.create(context);
eperson.setEmail(email);
eperson.setCanLogIn(true);
@@ -620,7 +620,7 @@ public class X509Authentication implements AuthenticationMethod
eperson);
eperson.update();
context.commit();
context.setIgnoreAuthorization(false);
context.restoreAuthSystemState();
context.setCurrentUser(eperson);
setSpecialGroupsFlag(request, email);
return SUCCESS;

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority;
import java.util.List;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public interface SolrAuthorityInterface {
List<AuthorityValue> queryAuthorities(String text, int max);
AuthorityValue queryAuthorityID(String id);
}

View File

@@ -1,86 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.Work;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.orcid.xml.XMLtoWork;
import org.dspace.authority.rest.RestSource;
import org.apache.log4j.Logger;
import org.dspace.utils.DSpace;
import org.w3c.dom.Document;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Orcid extends RestSource {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(Orcid.class);
private static Orcid orcid;
public static Orcid getOrcid() {
if (orcid == null) {
orcid = new DSpace().getServiceManager().getServiceByName("OrcidSource", Orcid.class);
}
return orcid;
}
private Orcid(String url) {
super(url);
}
public Bio getBio(String id) {
Document bioDocument = restConnector.get(id + "/orcid-bio");
XMLtoBio converter = new XMLtoBio();
Bio bio = converter.convert(bioDocument).get(0);
bio.setOrcid(id);
return bio;
}
public List<Work> getWorks(String id) {
Document document = restConnector.get(id + "/orcid-works");
XMLtoWork converter = new XMLtoWork();
return converter.convert(document);
}
public List<Bio> queryBio(String name, int start, int rows) {
Document bioDocument = restConnector.get("search/orcid-bio?q=" + URLEncoder.encode("\"" + name + "\"") + "&start=" + start + "&rows=" + rows);
XMLtoBio converter = new XMLtoBio();
return converter.convert(bioDocument);
}
@Override
public List<AuthorityValue> queryAuthorities(String text, int max) {
List<Bio> bios = queryBio(text, 0, max);
List<AuthorityValue> authorities = new ArrayList<AuthorityValue>();
for (Bio bio : bios) {
authorities.add(OrcidAuthorityValue.create(bio));
}
return authorities;
}
@Override
public AuthorityValue queryAuthorityID(String id) {
Bio bio = getBio(id);
return OrcidAuthorityValue.create(bio);
}
}

View File

@@ -1,316 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueGenerator;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.BioExternalIdentifier;
import org.dspace.authority.orcid.model.BioName;
import org.dspace.authority.orcid.model.BioResearcherUrl;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import java.util.*;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class OrcidAuthorityValue extends PersonAuthorityValue {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(OrcidAuthorityValue.class);
private String orcid_id;
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
private boolean update; // used in setValues(Bio bio)
/**
* Creates an instance of OrcidAuthorityValue with only uninitialized fields.
* This is meant to be filled in with values from an existing record.
* To create a brand new OrcidAuthorityValue, use create()
*/
public OrcidAuthorityValue() {
}
public OrcidAuthorityValue(SolrDocument document) {
super(document);
}
public String getOrcid_id() {
return orcid_id;
}
public void setOrcid_id(String orcid_id) {
this.orcid_id = orcid_id;
}
public Map<String, List<String>> getOtherMetadata() {
return otherMetadata;
}
public void addOtherMetadata(String label, String data) {
List<String> strings = otherMetadata.get(label);
if (strings == null) {
strings = new ArrayList<String>();
}
strings.add(data);
otherMetadata.put(label, strings);
}
@Override
public SolrInputDocument getSolrInputDocument() {
SolrInputDocument doc = super.getSolrInputDocument();
if (StringUtils.isNotBlank(getOrcid_id())) {
doc.addField("orcid_id", getOrcid_id());
}
for (String t : otherMetadata.keySet()) {
List<String> data = otherMetadata.get(t);
for (String data_entry : data) {
doc.addField("label_" + t, data_entry);
}
}
return doc;
}
@Override
public void setValues(SolrDocument document) {
super.setValues(document);
this.orcid_id = String.valueOf(document.getFieldValue("orcid_id"));
otherMetadata = new HashMap<String, List<String>>();
for (String fieldName : document.getFieldNames()) {
String labelPrefix = "label_";
if (fieldName.startsWith(labelPrefix)) {
String label = fieldName.substring(labelPrefix.length());
List<String> list = new ArrayList<String>();
Collection<Object> fieldValues = document.getFieldValues(fieldName);
for (Object o : fieldValues) {
list.add(String.valueOf(o));
}
otherMetadata.put(label, list);
}
}
}
public static OrcidAuthorityValue create() {
OrcidAuthorityValue orcidAuthorityValue = new OrcidAuthorityValue();
orcidAuthorityValue.setId(UUID.randomUUID().toString());
orcidAuthorityValue.updateLastModifiedDate();
orcidAuthorityValue.setCreationDate(new Date());
return orcidAuthorityValue;
}
/**
* Create an authority based on a given orcid bio
*/
public static OrcidAuthorityValue create(Bio bio) {
OrcidAuthorityValue authority = OrcidAuthorityValue.create();
authority.setValues(bio);
return authority;
}
public boolean setValues(Bio bio) {
BioName name = bio.getName();
if (updateValue(bio.getOrcid(), getOrcid_id())) {
setOrcid_id(bio.getOrcid());
}
if (updateValue(name.getFamilyName(), getLastName())) {
setLastName(name.getFamilyName());
}
if (updateValue(name.getGivenNames(), getFirstName())) {
setFirstName(name.getGivenNames());
}
if (StringUtils.isNotBlank(name.getCreditName())) {
if (!getNameVariants().contains(name.getCreditName())) {
addNameVariant(name.getCreditName());
update = true;
}
}
for (String otherName : name.getOtherNames()) {
if (!getNameVariants().contains(otherName)) {
addNameVariant(otherName);
update = true;
}
}
if (updateOtherMetadata("country", bio.getCountry())) {
addOtherMetadata("country", bio.getCountry());
}
for (String keyword : bio.getKeywords()) {
if (updateOtherMetadata("keyword", keyword)) {
addOtherMetadata("keyword", keyword);
}
}
for (BioExternalIdentifier externalIdentifier : bio.getBioExternalIdentifiers()) {
if (updateOtherMetadata("external_identifier", externalIdentifier.toString())) {
addOtherMetadata("external_identifier", externalIdentifier.toString());
}
}
for (BioResearcherUrl researcherUrl : bio.getResearcherUrls()) {
if (updateOtherMetadata("researcher_url", researcherUrl.toString())) {
addOtherMetadata("researcher_url", researcherUrl.toString());
}
}
if (updateOtherMetadata("biography", bio.getBiography())) {
addOtherMetadata("biography", bio.getBiography());
}
setValue(getName());
if (update) {
update();
}
boolean result = update;
update = false;
return result;
}
private boolean updateOtherMetadata(String label, String data) {
List<String> strings = getOtherMetadata().get(label);
boolean update;
if (strings == null) {
update = StringUtils.isNotBlank(data);
} else {
update = !strings.contains(data);
}
if (update) {
this.update = true;
}
return update;
}
private boolean updateValue(String incoming, String resident) {
boolean update = StringUtils.isNotBlank(incoming) && !incoming.equals(resident);
if (update) {
this.update = true;
}
return update;
}
@Override
public Map<String, String> choiceSelectMap() {
Map<String, String> map = super.choiceSelectMap();
map.put("orcid", getOrcid_id());
return map;
}
public String getAuthorityType() {
return "orcid";
}
@Override
public String generateString() {
String generateString = AuthorityValueGenerator.GENERATE + getAuthorityType() + AuthorityValueGenerator.SPLIT;
if (StringUtils.isNotBlank(getOrcid_id())) {
generateString += getOrcid_id();
}
return generateString;
}
@Override
public AuthorityValue newInstance(String info) {
AuthorityValue authorityValue = null;
if (StringUtils.isNotBlank(info)) {
Orcid orcid = Orcid.getOrcid();
authorityValue = orcid.queryAuthorityID(info);
} else {
authorityValue = OrcidAuthorityValue.create();
}
return authorityValue;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return orcid_id != null ? orcid_id.hashCode() : 0;
}
public boolean hasTheSameInformationAs(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.hasTheSameInformationAs(o)) {
return false;
}
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
for (String key : otherMetadata.keySet()) {
if(otherMetadata.get(key) != null){
List<String> metadata = otherMetadata.get(key);
List<String> otherMetadata = that.otherMetadata.get(key);
if (otherMetadata == null) {
return false;
} else {
HashSet<String> metadataSet = new HashSet<String>(metadata);
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
if (!metadataSet.equals(otherMetadataSet)) {
return false;
}
}
}else{
if(that.otherMetadata.get(key) != null){
return false;
}
}
}
return true;
}
}

View File

@@ -0,0 +1,185 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.rest.RESTConnector;
import org.json.JSONObject;
import org.orcid.jaxb.model.record_v2.Person;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
* This class contains all methods for retrieving "Person" objects calling the ORCID (version 2) endpoints.
* Additionally, this can also create AuthorityValues based on these returned Person objects
*/
public class Orcidv2 implements SolrAuthorityInterface {
private static Logger log = Logger.getLogger(Orcidv2.class);
public RESTConnector restConnector;
private String OAUTHUrl;
private String clientId;
private String clientSecret;
private String accessToken;
/**
* Initialize the accessToken that is required for all subsequent calls to ORCID
*/
public void init() throws IOException {
if (StringUtils.isNotBlank(accessToken) && StringUtils.isNotBlank(clientSecret)) {
String authenticationParameters = "?client_id=" + clientId + "&client_secret=" + clientSecret + "&scope=/read-public&grant_type=client_credentials";
HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
httpPost.addHeader("Accept", "application/json");
httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");
HttpClient httpClient = HttpClientBuilder.create().build();
HttpResponse getResponse = httpClient.execute(httpPost);
InputStream is = getResponse.getEntity().getContent();
BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
JSONObject responseObject = null;
String inputStr;
while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
try {
responseObject = new JSONObject(inputStr);
} catch (Exception e) {
//Not as valid as I'd hoped, move along
responseObject = null;
}
}
}
if (responseObject != null && responseObject.has("access_token")) {
accessToken = (String) responseObject.get("access_token");
}
}
}
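// Illustrative shape of the token response parsed above (field names follow the ORCID OAuth endpoint; values are placeholders):
// {"access_token":"xxxx-xxxx","token_type":"bearer","expires_in":631138518,"scope":"/read-public"}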
/**
* Makes an instance of the Orcidv2 class based on the provided parameters.
* This constructor is called through the spring bean initialization
*/
private Orcidv2(String url, String OAUTHUrl, String clientId, String clientSecret) {
this.restConnector = new RESTConnector(url);
this.OAUTHUrl = OAUTHUrl;
this.clientId = clientId;
this.clientSecret = clientSecret;
}
/**
* Makes an instance of the Orcidv2 class based on the provided parameters.
* This constructor is called through the spring bean initialization
*/
private Orcidv2(String url) {
this.restConnector = new RESTConnector(url);
}
/**
* Queries the ORCID API for Persons matching the given search string and converts them to AuthorityValues.
* @param text search string
* @param max maximum number of results to return
* @return List<AuthorityValue>
*/
@Override
public List<AuthorityValue> queryAuthorities(String text, int max) {
List<Person> bios = queryBio(text, max);
List<AuthorityValue> result = new ArrayList<>();
for (Person person : bios) {
AuthorityValue orcidAuthorityValue = Orcidv2AuthorityValue.create(person);
if (orcidAuthorityValue != null) {
result.add(orcidAuthorityValue);
}
}
return result;
}
/**
* Create an AuthorityValue from a Person retrieved using the given orcid identifier.
* @param id orcid identifier
* @return AuthorityValue
*/
public AuthorityValue queryAuthorityID(String id) {
Person person = getBio(id);
AuthorityValue valueFromPerson = Orcidv2AuthorityValue.create(person);
return valueFromPerson;
}
/**
* Retrieve a Person object based on a given orcid identifier
* @param id orcid identifier
* @return Person
*/
public Person getBio(String id) {
log.debug("getBio called with ID=" + id);
if(!isValid(id)){
return null;
}
InputStream bioDocument = restConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
XMLtoBio converter = new XMLtoBio();
Person person = converter.convertSinglePerson(bioDocument);
return person;
}
/**
* Retrieve a list of Person objects.
* @param text search string
* @param start offset to use
* @param rows how many rows to return
* @return List<Person>
*/
public List<Person> queryBio(String text, int start, int rows) {
if (rows > 100) {
throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
}
String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
InputStream bioDocument = restConnector.get(searchPath, accessToken);
XMLtoBio converter = new XMLtoBio();
List<Person> bios = converter.convert(bioDocument);
return bios;
}
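// Example: queryBio("smith", 0, 20) issues a GET against the configured ORCID API base URL with the path
// search?q=smith&start=0&rows=20 and converts the resulting search document into Person objects.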
/**
* Retrieve a list of Person objects.
* @param text search string
* @param max how many rows to return
* @return List<Person>
*/
public List<Person> queryBio(String text, int max) {
return queryBio(text, 0, max);
}
/**
* Check whether the provided text has valid ORCID iD syntax.
* Only lookups by ORCID iD are supported here, so this filters out queries that would return an empty result anyway.
*/
private boolean isValid(String text) {
return StringUtils.isNotBlank(text) && text.matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX);
}
}
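For orientation, a minimal usage sketch of this connector as it is wired elsewhere in this changeset (the "AuthoritySource" bean name appears in Orcidv2AuthorityValue.newInstance and XMLtoBio; the search string and ORCID iD below are illustrative):
// assumes imports of org.dspace.utils.DSpace and org.dspace.authority.AuthorityValue
Orcidv2 orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
List<AuthorityValue> matches = orcid.queryAuthorities("van goolen", 20); // free-text search via queryBio
AuthorityValue single = orcid.queryAuthorityID("0000-0002-1825-0097"); // direct lookup via getBio and the /person endpoint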

View File

@@ -0,0 +1,330 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueGenerator;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.utils.DSpace;
import org.orcid.jaxb.model.common_v2.ExternalId;
import org.orcid.jaxb.model.record_v2.*;
import java.util.*;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public class Orcidv2AuthorityValue extends PersonAuthorityValue {
/*
* The ORCID identifier
*/
private String orcid_id;
/*
* Map containing key-value pairs filled in by "setValues(Person person)".
* This represents all dynamic information of the object.
*/
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
/**
* The syntax that the ORCID id needs to conform to, e.g. 0000-0002-1825-0097 (the last position may be an X check digit)
*/
public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";
/**
* Creates an instance of Orcidv2AuthorityValue with only uninitialized fields.
* This is meant to be filled in with values from an existing record.
* To create a brand new Orcidv2AuthorityValue, use create()
*/
public Orcidv2AuthorityValue() {
}
public Orcidv2AuthorityValue(SolrDocument document) {
super(document);
}
public String getOrcid_id() {
return orcid_id;
}
public void setOrcid_id(String orcid_id) {
this.orcid_id = orcid_id;
}
/**
* Create an empty authority.
* @return Orcidv2AuthorityValue
*/
public static Orcidv2AuthorityValue create() {
Orcidv2AuthorityValue orcidAuthorityValue = new Orcidv2AuthorityValue();
orcidAuthorityValue.setId(UUID.randomUUID().toString());
orcidAuthorityValue.updateLastModifiedDate();
orcidAuthorityValue.setCreationDate(new Date());
return orcidAuthorityValue;
}
/**
* Create an authority based on a given ORCID Person
* @param person Person object retrieved from ORCID
* @return Orcidv2AuthorityValue
*/
public static Orcidv2AuthorityValue create(Person person) {
if (person == null) {
return null;
}
Orcidv2AuthorityValue authority = Orcidv2AuthorityValue.create();
authority.setValues(person);
return authority;
}
/**
* Initialize this instance based on a Person object
* @param person Person
*/
protected void setValues(Person person) {
NameType name = person.getName();
if (!StringUtils.equals(name.getPath(), this.getOrcid_id())) {
this.setOrcid_id(name.getPath());
}
if (!StringUtils.equals(name.getFamilyName().getValue(), this.getLastName())) {
this.setLastName(name.getFamilyName().getValue());
}
if (!StringUtils.equals(name.getGivenNames().getValue(), this.getFirstName())) {
this.setFirstName(name.getGivenNames().getValue());
}
if (name.getCreditName() != null && StringUtils.isNotBlank(name.getCreditName().getValue())) {
if (!this.getNameVariants().contains(name.getCreditName().getValue())) {
this.addNameVariant(name.getCreditName().getValue());
}
}
if (person.getKeywords() != null) {
for (KeywordType keyword : person.getKeywords().getKeyword()) {
if (this.isNewMetadata("keyword", keyword.getContent())) {
this.addOtherMetadata("keyword", keyword.getContent());
}
}
}
ExternalIdentifiers externalIdentifiers = person.getExternalIdentifiers();
if (externalIdentifiers != null) {
for (ExternalId externalIdentifier : externalIdentifiers.getExternalIdentifier()) {
if (this.isNewMetadata("external_identifier", externalIdentifier.getExternalIdValue())) {
this.addOtherMetadata("external_identifier", externalIdentifier.getExternalIdValue());
}
}
}
if (person.getResearcherUrls() != null) {
for (ResearcherUrlType researcherUrl : person.getResearcherUrls().getResearcherUrl()) {
if (this.isNewMetadata("researcher_url", researcherUrl.getUrl().getValue())) {
this.addOtherMetadata("researcher_url", researcherUrl.getUrl().getValue());
}
}
}
if (person.getBiography() != null) {
if (this.isNewMetadata("biography", person.getBiography().getContent())) {
this.addOtherMetadata("biography", person.getBiography().getContent());
}
}
this.setValue(this.getName());
}
/**
* Makes an instance of the AuthorityValue with the given information.
* @param info the ORCID iD to look up; if blank, an empty value is created
* @return AuthorityValue
*/
@Override
public AuthorityValue newInstance(String info) {
AuthorityValue authorityValue = null;
if (StringUtils.isNotBlank(info)) {
Orcidv2 orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
authorityValue = orcid.queryAuthorityID(info);
} else {
authorityValue = this.create();
}
return authorityValue;
}
@Override
public void setValue(String value) {
super.setValue(value);
}
/**
* Check whether the provided label / data pair is not yet present in "otherMetadata" (returns true when the pair is new)
*/
public boolean isNewMetadata(String label, String data) {
List<String> strings = getOtherMetadata().get(label);
boolean update;
if (strings == null) {
update = StringUtils.isNotBlank(data);
} else {
update = !strings.contains(data);
}
return update;
}
/**
* Add additional metadata to the otherMetadata map
*/
public void addOtherMetadata(String label, String data) {
List<String> strings = otherMetadata.get(label);
if (strings == null) {
strings = new ArrayList<>();
}
strings.add(data);
otherMetadata.put(label, strings);
}
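// Example: after addOtherMetadata("keyword", "oceanography"), isNewMetadata("keyword", "oceanography")
// returns false, while isNewMetadata("keyword", "geology") still returns true.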
public Map<String, List<String>> getOtherMetadata() {
return otherMetadata;
}
/**
* Generate a solr record from this instance
* @return SolrInputDocument
*/
@Override
public SolrInputDocument getSolrInputDocument() {
SolrInputDocument doc = super.getSolrInputDocument();
if (StringUtils.isNotBlank(getOrcid_id())) {
doc.addField("orcid_id", getOrcid_id());
}
for (String t : otherMetadata.keySet()) {
List<String> data = otherMetadata.get(t);
for (String data_entry : data) {
doc.addField("label_" + t, data_entry);
}
}
return doc;
}
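// Example: an otherMetadata entry "keyword" -> ["oceanography"] is indexed as a Solr field
// "label_keyword" with value "oceanography", alongside the "orcid_id" field added above.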
/**
* Information that can be used in the choice UI
* @return map
*/
@Override
public Map<String, String> choiceSelectMap() {
Map<String, String> map = super.choiceSelectMap();
String orcid_id = getOrcid_id();
if (StringUtils.isNotBlank(orcid_id)) {
map.put("orcid", orcid_id);
}
return map;
}
@Override
public String getAuthorityType() {
return "orcid";
}
/**
* Provides a string that allows this AuthorityType to be recognized and that carries the information needed to create a new instance using public Orcidv2AuthorityValue newInstance(String info).
* @return see {@link org.dspace.authority.AuthorityValueGenerator#GENERATE AuthorityValueGenerator.GENERATE}
*/
@Override
public String generateString() {
String generateString = AuthorityValueGenerator.GENERATE + getAuthorityType() + AuthorityValueGenerator.SPLIT;
if (StringUtils.isNotBlank(getOrcid_id())) {
generateString += getOrcid_id();
}
return generateString;
}
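// Example: for ORCID iD 0000-0002-1825-0097 this yields GENERATE + "orcid" + SPLIT + "0000-0002-1825-0097",
// i.e. "will be generated::orcid::0000-0002-1825-0097" assuming the usual constant values in AuthorityValueGenerator.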
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return orcid_id != null ? orcid_id.hashCode() : 0;
}
/**
* The regular equals() only checks if both AuthorityValues describe the same authority.
* This method checks whether the AuthorityValues carry the same information,
* e.g. it is used to decide whether lastModified should be updated.
* @param o object
* @return true if the other object carries the same information, false otherwise
*/
@Override
public boolean hasTheSameInformationAs(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.hasTheSameInformationAs(o)) {
return false;
}
Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
for (String key : otherMetadata.keySet()) {
if (otherMetadata.get(key) != null) {
List<String> metadata = otherMetadata.get(key);
List<String> otherMetadata = that.otherMetadata.get(key);
if (otherMetadata == null) {
return false;
} else {
HashSet<String> metadataSet = new HashSet<String>(metadata);
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
if (!metadataSet.equals(otherMetadataSet)) {
return false;
}
}
} else {
if (that.otherMetadata.get(key) != null) {
return false;
}
}
}
return true;
}
}
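A minimal sketch of building one of these values directly and turning it into a Solr document (all method names are from this class; the ORCID iD and keyword are illustrative):
Orcidv2AuthorityValue value = Orcidv2AuthorityValue.create(); // assigns an id, creation date and last-modified date
value.setOrcid_id("0000-0002-1825-0097");
value.addOtherMetadata("keyword", "oceanography");
SolrInputDocument doc = value.getSolrInputDocument(); // carries "orcid_id" and "label_keyword" fields
String authorityKey = value.generateString(); // GENERATE + "orcid" + SPLIT + the ORCID iD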

View File

@@ -1,113 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.LinkedHashSet;
import java.util.Set;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Bio {
protected String orcid;
protected BioName name;
protected String country;
protected Set<String> keywords;
protected Set<BioExternalIdentifier> bioExternalIdentifiers;
protected Set<BioResearcherUrl> researcherUrls;
protected String biography;
public Bio() {
this.name = new BioName();
keywords = new LinkedHashSet<String>();
bioExternalIdentifiers = new LinkedHashSet<BioExternalIdentifier>();
researcherUrls = new LinkedHashSet<BioResearcherUrl>();
}
public String getOrcid() {
return orcid;
}
public void setOrcid(String orcid) {
this.orcid = orcid;
}
public BioName getName() {
return name;
}
public void setName(BioName name) {
this.name = name;
}
public String getCountry() {
return country;
}
public void setCountry(String country) {
this.country = country;
}
public Set<String> getKeywords() {
return keywords;
}
public void addKeyword(String keyword) {
this.keywords.add(keyword);
}
public Set<BioExternalIdentifier> getBioExternalIdentifiers() {
return bioExternalIdentifiers;
}
public void addExternalIdentifier(BioExternalIdentifier externalReference) {
bioExternalIdentifiers.add(externalReference);
}
public Set<BioResearcherUrl> getResearcherUrls() {
return researcherUrls;
}
public void addResearcherUrl(BioResearcherUrl researcherUrl) {
researcherUrls.add(researcherUrl);
}
public String getBiography() {
return biography;
}
public void setBiography(String biography) {
this.biography = biography;
}
@Override
public String toString() {
return "Bio{" +
"orcid='" + orcid + '\'' +
", name=" + name +
", country='" + country + '\'' +
", keywords=" + keywords +
", bioExternalIdentifiers=" + bioExternalIdentifiers +
", researcherUrls=" + researcherUrls +
", biography='" + biography + '\'' +
'}';
}
}

View File

@@ -1,109 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioExternalIdentifier {
protected String id_orcid;
protected String id_common_name;
protected String id_reference;
protected String id_url;
public BioExternalIdentifier(String id_orcid, String id_common_name, String id_reference, String id_url) {
this.id_orcid = id_orcid;
this.id_common_name = id_common_name;
this.id_reference = id_reference;
this.id_url = id_url;
}
public String getId_orcid() {
return id_orcid;
}
public void setId_orcid(String id_orcid) {
this.id_orcid = id_orcid;
}
public String getId_common_name() {
return id_common_name;
}
public void setId_common_name(String id_common_name) {
this.id_common_name = id_common_name;
}
public String getId_reference() {
return id_reference;
}
public void setId_reference(String id_reference) {
this.id_reference = id_reference;
}
public String getId_url() {
return id_url;
}
public void setId_url(String id_url) {
this.id_url = id_url;
}
@Override
public String toString() {
return "BioExternalIdentifier{" +
"id_orcid='" + id_orcid + '\'' +
", id_common_name='" + id_common_name + '\'' +
", id_reference='" + id_reference + '\'' +
", id_url='" + id_url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioExternalIdentifier that = (BioExternalIdentifier) o;
if (id_common_name != null ? !id_common_name.equals(that.id_common_name) : that.id_common_name != null) {
return false;
}
if (id_orcid != null ? !id_orcid.equals(that.id_orcid) : that.id_orcid != null) {
return false;
}
if (id_reference != null ? !id_reference.equals(that.id_reference) : that.id_reference != null) {
return false;
}
if (id_url != null ? !id_url.equals(that.id_url) : that.id_url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = id_orcid != null ? id_orcid.hashCode() : 0;
result = 31 * result + (id_common_name != null ? id_common_name.hashCode() : 0);
result = 31 * result + (id_reference != null ? id_reference.hashCode() : 0);
result = 31 * result + (id_url != null ? id_url.hashCode() : 0);
return result;
}
}

View File

@@ -1,115 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioName {
protected String givenNames;
protected String familyName;
protected String creditName;
protected List<String> otherNames;
BioName() {
otherNames = new ArrayList<String>();
}
BioName(String givenNames, String familyName, String creditName, List<String> otherNames) {
this.givenNames = givenNames;
this.familyName = familyName;
this.creditName = creditName;
this.otherNames = otherNames;
}
public String getGivenNames() {
return givenNames;
}
public void setGivenNames(String givenNames) {
this.givenNames = givenNames;
}
public String getFamilyName() {
return familyName;
}
public void setFamilyName(String familyName) {
this.familyName = familyName;
}
public String getCreditName() {
return creditName;
}
public void setCreditName(String creditName) {
this.creditName = creditName;
}
public List<String> getOtherNames() {
return otherNames;
}
public void setOtherNames(List<String> otherNames) {
this.otherNames = otherNames;
}
@Override
public String toString() {
return "BioName{" +
"givenNames='" + givenNames + '\'' +
", familyName='" + familyName + '\'' +
", creditName='" + creditName + '\'' +
", otherNames=" + otherNames +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioName bioName = (BioName) o;
if (creditName != null ? !creditName.equals(bioName.creditName) : bioName.creditName != null) {
return false;
}
if (familyName != null ? !familyName.equals(bioName.familyName) : bioName.familyName != null) {
return false;
}
if (givenNames != null ? !givenNames.equals(bioName.givenNames) : bioName.givenNames != null) {
return false;
}
if (otherNames != null ? !otherNames.equals(bioName.otherNames) : bioName.otherNames != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = givenNames != null ? givenNames.hashCode() : 0;
result = 31 * result + (familyName != null ? familyName.hashCode() : 0);
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
result = 31 * result + (otherNames != null ? otherNames.hashCode() : 0);
return result;
}
}

View File

@@ -1,78 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioResearcherUrl {
protected String name;
protected String url;
public BioResearcherUrl(String name, String url) {
this.name = name;
this.url = url;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
@Override
public String toString() {
return "BioResearcherUrl{" +
"name='" + name + '\'' +
", url='" + url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioResearcherUrl that = (BioResearcherUrl) o;
if (name != null ? !name.equals(that.name) : that.name != null) {
return false;
}
if (url != null ? !url.equals(that.url) : that.url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (url != null ? url.hashCode() : 0);
return result;
}
}

View File

@@ -1,50 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Citation {
private CitationType type;
private String citation;
public Citation(CitationType type, String citation) {
this.type = type;
this.citation = citation;
}
public CitationType getType() {
return type;
}
public void setType(CitationType type) {
this.type = type;
}
public String getCitation() {
return citation;
}
public void setCitation(String citation) {
this.citation = citation;
}
@Override
public String toString() {
return "Citation{" +
"type=" + type +
", citation='" + citation + '\'' +
'}';
}
}

View File

@@ -1,29 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum CitationType {
FORMATTED_UNSPECIFIED,
BIBTEX,
FORMATTED_APA,
FORMATTED_HARVARD,
FORMATTED_IEEE,
FORMATTED_MLA,
FORMATTED_VANCOUVER,
FORMATTED_CHICAGO
}

View File

@@ -1,111 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Contributor {
private String orcid;
private String creditName;
private String email;
private Set<ContributorAttribute> contributorAttributes;
public Contributor(String orcid, String creditName, String email, Set<ContributorAttribute> contributorAttributes) {
this.orcid = orcid;
this.creditName = creditName;
this.email = email;
this.contributorAttributes = contributorAttributes;
}
public String getOrcid() {
return orcid;
}
public void setOrcid(String orcid) {
this.orcid = orcid;
}
public String getCreditName() {
return creditName;
}
public void setCreditName(String creditName) {
this.creditName = creditName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public Set<ContributorAttribute> getContributorAttributes() {
return contributorAttributes;
}
public void setContributorAttributes(Set<ContributorAttribute> contributorAttributes) {
this.contributorAttributes = contributorAttributes;
}
@Override
public String toString() {
return "Contributor{" +
"orcid='" + orcid + '\'' +
", creditName='" + creditName + '\'' +
", email='" + email + '\'' +
", contributorAttributes=" + contributorAttributes +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Contributor that = (Contributor) o;
if (contributorAttributes != null ? !contributorAttributes.equals(that.contributorAttributes) : that.contributorAttributes != null) {
return false;
}
if (creditName != null ? !creditName.equals(that.creditName) : that.creditName != null) {
return false;
}
if (email != null ? !email.equals(that.email) : that.email != null) {
return false;
}
if (orcid != null ? !orcid.equals(that.orcid) : that.orcid != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = orcid != null ? orcid.hashCode() : 0;
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
result = 31 * result + (email != null ? email.hashCode() : 0);
result = 31 * result + (contributorAttributes != null ? contributorAttributes.hashCode() : 0);
return result;
}
}

View File

@@ -1,79 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class ContributorAttribute {
private ContributorAttributeRole role;
private ContributorAttributeSequence sequence;
public ContributorAttribute(ContributorAttributeRole role, ContributorAttributeSequence sequence) {
this.role = role;
this.sequence = sequence;
}
public ContributorAttributeRole getRole() {
return role;
}
public void setRole(ContributorAttributeRole role) {
this.role = role;
}
public ContributorAttributeSequence getSequence() {
return sequence;
}
public void setSequence(ContributorAttributeSequence sequence) {
this.sequence = sequence;
}
@Override
public String toString() {
return "ContributorAttribute{" +
"role=" + role +
", sequence=" + sequence +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ContributorAttribute that = (ContributorAttribute) o;
if (role != that.role) {
return false;
}
if (sequence != that.sequence) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = role != null ? role.hashCode() : 0;
result = 31 * result + (sequence != null ? sequence.hashCode() : 0);
return result;
}
}

View File

@@ -1,32 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum ContributorAttributeRole {
AUTHOR,
ASSIGNEE,
EDITOR,
CHAIR_OR_TRANSLATOR,
CO_INVESTIGATOR,
CO_INVENTOR,
GRADUATE_STUDENT,
OTHER_INVENTOR,
PRINCIPAL_INVESTIGATOR,
POSTDOCTORAL_RESEARCHER,
SUPPORT_STAFF
}

View File

@@ -1,23 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum ContributorAttributeSequence {
FIRST,
ADDITIONAL
}

View File

@@ -1,117 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Work {
private WorkTitle workTitle;
private String description;
private Citation citation;
private WorkType workType;
private String publicationDate;
private WorkExternalIdentifier workExternalIdentifier;
private String url;
private Set<Contributor> contributors;
private String workSource;
public WorkTitle getWorkTitle() {
return workTitle;
}
public void setWorkTitle(WorkTitle workTitle) {
this.workTitle = workTitle;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Citation getCitation() {
return citation;
}
public void setCitation(Citation citation) {
this.citation = citation;
}
public WorkType getWorkType() {
return workType;
}
public void setWorkType(WorkType workType) {
this.workType = workType;
}
public String getPublicationDate() {
return publicationDate;
}
public void setPublicationDate(String publicationDate) {
this.publicationDate = publicationDate;
}
public WorkExternalIdentifier getWorkExternalIdentifier() {
return workExternalIdentifier;
}
public void setWorkExternalIdentifier(WorkExternalIdentifier workExternalIdentifier) {
this.workExternalIdentifier = workExternalIdentifier;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public Set<Contributor> getContributors() {
return contributors;
}
public void setContributors(Set<Contributor> contributors) {
this.contributors = contributors;
}
public String getWorkSource() {
return workSource;
}
public void setWorkSource(String workSource) {
this.workSource = workSource;
}
@Override
public String toString() {
return "Work{" +
"workTitle=" + workTitle +
", description='" + description + '\'' +
", citation=" + citation +
", workType=" + workType +
", publicationDate='" + publicationDate + '\'' +
", workExternalIdentifier=" + workExternalIdentifier +
", url='" + url + '\'' +
", contributors=" + contributors +
", workSource='" + workSource + '\'' +
'}';
}
}

View File

@@ -1,71 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118807
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class WorkExternalIdentifier {
private WorkExternalIdentifierType workExternalIdentifierType;
private String workExternalIdenfitierID;
public WorkExternalIdentifier(WorkExternalIdentifierType workExternalIdentifierType, String workExternalIdenfitierID) {
this.workExternalIdentifierType = workExternalIdentifierType;
this.workExternalIdenfitierID = workExternalIdenfitierID;
}
public WorkExternalIdentifierType getWorkExternalIdentifierType() {
return workExternalIdentifierType;
}
public void setWorkExternalIdentifierType(WorkExternalIdentifierType workExternalIdentifierType) {
this.workExternalIdentifierType = workExternalIdentifierType;
}
@Override
public String toString() {
return "WorkExternalIdentifier{" +
"workExternalIdentifierType=" + workExternalIdentifierType +
", workExternalIdenfitierID='" + workExternalIdenfitierID + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
WorkExternalIdentifier that = (WorkExternalIdentifier) o;
if (workExternalIdenfitierID != null ? !workExternalIdenfitierID.equals(that.workExternalIdenfitierID) : that.workExternalIdenfitierID != null) {
return false;
}
if (workExternalIdentifierType != that.workExternalIdentifierType) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = workExternalIdentifierType != null ? workExternalIdentifierType.hashCode() : 0;
result = 31 * result + (workExternalIdenfitierID != null ? workExternalIdenfitierID.hashCode() : 0);
return result;
}
}

View File

@@ -1,42 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118807
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum WorkExternalIdentifierType {
// OTHER_ID,
ARXIV,
ASIN,
ASIN_TLD,
BIBCODE,
DOI,
EID,
ISBN,
ISSN,
JFM,
JSTOR,
LCCN,
MR,
OCLC,
OL,
OSTI,
PMC,
PMID,
RFC,
SSRN,
ZBL
}

View File

@@ -1,64 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Map;
/**
* http://support.orcid.org/knowledgebase/articles/118807
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class WorkTitle {
private String title;
private String subtitle;
private Map<String, String> translatedTitles;
public WorkTitle(String title, String subtitle, Map<String, String> translatedTitles) {
this.title = title;
this.subtitle = subtitle;
this.translatedTitles = translatedTitles;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getSubtitle() {
return subtitle;
}
public void setSubtitle(String subtitle) {
this.subtitle = subtitle;
}
public String getTranslatedTitles(String languageCode) {
return translatedTitles.get(languageCode);
}
public void setTranslatedTitle(String languageCode, String translatedTitle) {
translatedTitles.put(languageCode, translatedTitle);
}
@Override
public String toString() {
return "WorkTitle{" +
"title='" + title + '\'' +
", subtitle='" + subtitle + '\'' +
", translatedTitles=" + translatedTitles +
'}';
}
}

View File

@@ -1,57 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* http://support.orcid.org/knowledgebase/articles/118795
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public enum WorkType {
BOOK,
BOOK_CHAPTER,
BOOK_REVIEW,
DICTIONARY_ENTRY,
DISSERTATION,
ENCYCLOPEDIA_ARTICLE,
EDITED_BOOK,
JOURNAL_ARTICLE,
JOURNAL_ISSUE,
MAGAZINE_ARTICLE,
MANUAL,
ONLINE_RESOURCE,
NEWSLETTER_ARTICLE,
NEWSPAPER_ARTICLE,
REPORT,
RESEARCH_TOOL,
SUPERVISED_STUDENT_PUBLICATION,
TEST,
TRANSLATION,
WEBSITE,
CONFERENCE_ABSTRACT,
CONFERENCE_PAPER,
CONFERENCE_POSTER,
DISCLOSURE,
LICENSE,
PATENT,
REGISTERED_COPYRIGHT,
ARTISTIC_PERFORMANCE,
DATA_SET,
INVENTION,
LECTURE_SPEECH,
RESEARCH_TECHNIQUE,
SPIN_OFF_COMPANY,
STANDARDS_AND_POLICY,
TECHNICAL_STANDARD,
OTHER
}

View File

@@ -8,7 +8,13 @@
package org.dspace.authority.orcid.xml;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import java.io.InputStream;
import java.net.URISyntaxException;
/**
*
@@ -24,11 +30,15 @@ public abstract class Converter<T> {
*/
private static Logger log = Logger.getLogger(Converter.class);
public abstract T convert(InputStream document);
protected void processError(Document xml) {
String errorMessage = XMLErrors.getErrorMessage(xml);
log.error("The orcid-message reports an error: " + errorMessage);
protected Object unmarshall(InputStream input, Class<?> type) throws SAXException, URISyntaxException {
try {
JAXBContext context = JAXBContext.newInstance(type);
Unmarshaller unmarshaller = context.createUnmarshaller();
return unmarshaller.unmarshal(input);
} catch (JAXBException e) {
throw new RuntimeException("Unable to unmarshall orcid message", e);
}
}
public abstract T convert(Document document);
}

View File

@@ -1,73 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.xml;
import org.dspace.authority.util.XMLUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import javax.xml.xpath.XPathExpressionException;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class XMLErrors {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(XMLErrors.class);
private static final String ERROR_DESC = "/orcid-message/error-desc";
/**
* Evaluates whether a given xml document contains errors or not.
*
* @param xml The given xml document
* @return true if the given xml document is null
* or if it contains no error description; false if an error was reported
*/
public static boolean check(Document xml) {
if (xml == null) {
return true;
}
String textContent = null;
try {
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
} catch (XPathExpressionException e) {
log.error("Error while checking for errors in orcid message", e);
}
return textContent == null;
}
public static String getErrorMessage(Document xml) {
if (xml == null) {
return "Did not receive an XML document.";
}
String textContent = null;
try {
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
} catch (XPathExpressionException e) {
log.error("Error while checking for errors in orcid message", e);
}
return textContent;
}
}

View File

@@ -7,23 +7,22 @@
*/
package org.dspace.authority.orcid.xml;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.BioExternalIdentifier;
import org.dspace.authority.orcid.model.BioName;
import org.dspace.authority.orcid.model.BioResearcherUrl;
import org.dspace.authority.util.XMLUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.xpath.XPathExpressionException;
import org.apache.log4j.Logger;
import org.dspace.authority.orcid.Orcidv2;
import org.dspace.utils.DSpace;
import org.orcid.jaxb.model.common_v2.OrcidId;
import org.orcid.jaxb.model.record_v2.Person;
import org.orcid.jaxb.model.search_v2.Result;
import org.orcid.jaxb.model.search_v2.Search;
import org.xml.sax.SAXException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
@@ -36,216 +35,39 @@ public class XMLtoBio extends Converter {
*/
private static Logger log = Logger.getLogger(XMLtoBio.class);
/**
* orcid-message XPATHs
*/
protected String ORCID_BIO = "//orcid-bio";
// protected String ORCID = "parent::*/orcid";
protected String ORCID = "parent::*/orcid-identifier/path";
protected String PERSONAL_DETAILS = "personal-details";
protected String GIVEN_NAMES = PERSONAL_DETAILS + "/given-names";
protected String FAMILY_NAME = PERSONAL_DETAILS + "/family-name";
protected String CREDIT_NAME = PERSONAL_DETAILS + "/credit-name";
protected String OTHER_NAMES = PERSONAL_DETAILS + "/other-names";
protected String OTHER_NAME = OTHER_NAMES + "/other-name";
protected String CONTACT_DETAILS = "contact-details";
protected String COUNTRY = CONTACT_DETAILS + "/address/country";
protected String KEYWORDS = "keywords";
protected String KEYWORD = KEYWORDS + "/keyword";
protected String EXTERNAL_IDENTIFIERS = "external-identifiers";
protected String EXTERNAL_IDENTIFIER = EXTERNAL_IDENTIFIERS + "/external-identifier";
protected String EXTERNAL_ID_ORCID = "external-id-orcid";
protected String EXTERNAL_ID_COMMNON_NAME = "external-id-common-name";
protected String EXTERNAL_ID_REFERENCE = "external-id-reference";
protected String EXTERNAL_ID_URL = "external-id-url";
protected String RESEARCHER_URLS = "researcher-urls";
protected String RESEARCHER_URL = "researcher-urls/researcher-url";
protected String URL_NAME = "url-name";
protected String URL = "url";
protected String BIOGRAPHY = ORCID_BIO + "/biography";
protected String AFFILIATIONS = ORCID_BIO + "/affiliation";
/**
* Regex
*/
protected String ORCID_NOT_FOUND = "ORCID [\\d-]* not found";
public List<Bio> convert(Document xml) {
List<Bio> result = new ArrayList<Bio>();
if (XMLErrors.check(xml)) {
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, ORCID_BIO);
while (iterator.hasNext()) {
Bio bio = convertBio(iterator.next());
result.add(bio);
}
} catch (XPathExpressionException e) {
log.error("Error in xpath syntax", e);
}
} else {
processError(xml);
}
return result;
}
private Bio convertBio(Node node) {
Bio bio = new Bio();
setOrcid(node,bio);
setPersonalDetails(node, bio);
setContactDetails(node, bio);
setKeywords(node, bio);
setExternalIdentifiers(node, bio);
setResearcherUrls(node, bio);
setBiography(node, bio);
return bio;
}
protected void processError(Document xml) {
String errorMessage = XMLErrors.getErrorMessage(xml);
if(errorMessage.matches(ORCID_NOT_FOUND))
{
// do something?
}
log.error("The orcid-message reports an error: " + errorMessage);
}
private void setOrcid(Node node, Bio bio) {
@Override
public List<Person> convert(InputStream xml) {
List<Person> bios = new ArrayList<>();
try {
String orcid = XMLUtils.getTextContent(node, ORCID);
bio.setOrcid(orcid);
} catch (XPathExpressionException e) {
log.debug("Error in finding the biography in bio xml.", e);
}
}
Orcidv2 connector = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
protected void setBiography(Node xml, Bio bio) {
try {
String biography = XMLUtils.getTextContent(xml, BIOGRAPHY);
bio.setBiography(biography);
} catch (XPathExpressionException e) {
log.error("Error in finding the biography in bio xml.", e);
}
}
protected void setResearcherUrls(Node xml, Bio bio) {
try {
NodeList researcher_urls = XMLUtils.getNodeList(xml, RESEARCHER_URL);
if (researcher_urls != null) {
for (int i = 0; i < researcher_urls.getLength(); i++) {
Node researcher_url = researcher_urls.item(i);
if (researcher_url.getNodeType() != Node.TEXT_NODE) {
String url_name = XMLUtils.getTextContent(researcher_url, URL_NAME);
String url = XMLUtils.getTextContent(researcher_url, URL);
BioResearcherUrl researcherUrl = new BioResearcherUrl(url_name, url);
bio.addResearcherUrl(researcherUrl);
Search search = (Search) unmarshall(xml, Search.class);
for (Result result : search.getResult()) {
OrcidId orcidIdentifier = result.getOrcidIdentifier();
if (orcidIdentifier != null) {
log.debug("Found OrcidId=" + orcidIdentifier.toString());
String orcid = orcidIdentifier.getUriPath();
Person bio = connector.getBio(orcid);
if (bio != null) {
bios.add(bio);
}
}
}
} catch (XPathExpressionException e) {
log.error("Error in finding the researcher url in bio xml.", e);
} catch (SAXException | URISyntaxException e) {
log.error(e);
}
return bios;
}
protected void setExternalIdentifiers(Node xml, Bio bio) {
public Person convertSinglePerson(InputStream xml) {
Person person = null;
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, EXTERNAL_IDENTIFIER);
while (iterator.hasNext()) {
Node external_identifier = iterator.next();
String id_orcid = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_ORCID);
String id_common_name = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_COMMNON_NAME);
String id_reference = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_REFERENCE);
String id_url = XMLUtils.getTextContent(external_identifier, EXTERNAL_ID_URL);
BioExternalIdentifier externalIdentifier = new BioExternalIdentifier(id_orcid, id_common_name, id_reference, id_url);
bio.addExternalIdentifier(externalIdentifier);
}
} catch (XPathExpressionException e) {
log.error("Error in finding the external identifier in bio xml.", e);
}
}
protected void setKeywords(Node xml, Bio bio) {
try {
NodeList keywords = XMLUtils.getNodeList(xml, KEYWORD);
if (keywords != null) {
for (int i = 0; i < keywords.getLength(); i++) {
String keyword = keywords.item(i).getTextContent();
String[] split = keyword.split(",");
for (String k : split) {
bio.addKeyword(k.trim());
}
}
}
} catch (XPathExpressionException e) {
log.error("Error in finding the keywords in bio xml.", e);
}
}
protected void setContactDetails(Node xml, Bio bio) {
try {
String country = XMLUtils.getTextContent(xml, COUNTRY);
bio.setCountry(country);
} catch (XPathExpressionException e) {
log.error("Error in finding the country in bio xml.", e);
}
}
protected void setPersonalDetails(Node xml, Bio bio) {
BioName name = bio.getName();
try {
String givenNames = XMLUtils.getTextContent(xml, GIVEN_NAMES);
name.setGivenNames(givenNames);
} catch (XPathExpressionException e) {
log.error("Error in finding the given names in bio xml.", e);
}
try {
String familyName = XMLUtils.getTextContent(xml, FAMILY_NAME);
name.setFamilyName(familyName);
} catch (XPathExpressionException e) {
log.error("Error in finding the family name in bio xml.", e);
}
try {
String creditName = XMLUtils.getTextContent(xml, CREDIT_NAME);
name.setCreditName(creditName);
} catch (XPathExpressionException e) {
log.error("Error in finding the credit name in bio xml.", e);
}
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(xml, OTHER_NAME);
while (iterator.hasNext()) {
Node otherName = iterator.next();
String textContent = otherName.getTextContent();
name.getOtherNames().add(textContent.trim());
}
} catch (XPathExpressionException e) {
log.error("Error in finding the other names in bio xml.", e);
person = (Person) unmarshall(xml, Person.class);
return person;
} catch (SAXException | URISyntaxException e) {
log.error(e);
}
return null;
}
}

View File

@@ -1,239 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.xml;
import org.dspace.authority.orcid.model.*;
import org.dspace.authority.util.*;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.xpath.XPathExpressionException;
import java.util.*;
/**
*
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class XMLtoWork extends Converter {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(XMLtoWork.class);
/**
* orcid-message XPATHs
*/
protected String ORCID_WORKS = "//orcid-works";
protected String ORCID_WORK = ORCID_WORKS + "/orcid-work";
protected String WORK_TITLE = "work-title";
protected String TITLE = WORK_TITLE + "/title";
protected String SUBTITLE = WORK_TITLE + "/subtitle";
protected String TRANSLATED_TITLES = WORK_TITLE + "/translated-title";
protected String TRANSLATED_TITLES_LANGUAGE = "@language-code";
protected String SHORT_DESCRIPTION = "short-description";
protected String WORK_CITATION = "work-citation";
protected String CITATION_TYPE = WORK_CITATION + "/work-citation-type";
protected String CITATION = WORK_CITATION + "/citation";
protected String WORK_TYPE = "work-type";
protected String PUBLICATION_DATE = "publication-date";
protected String YEAR = PUBLICATION_DATE + "/year";
protected String MONTH = PUBLICATION_DATE + "/month";
protected String DAY = PUBLICATION_DATE + "/day";
protected String WORK_EXTERNAL_IDENTIFIERS = "work-external-identifiers";
protected String WORK_EXTERNAL_IDENTIFIER = WORK_EXTERNAL_IDENTIFIERS + "/work-external-identifier";
protected String WORK_EXTERNAL_IDENTIFIER_TYPE = "work-external-identifier-type";
protected String WORK_EXTERNAL_IDENTIFIER_ID = "work-external-identifier-id";
protected String URL = "url";
protected String WORK_CONTRIBUTOR = "work-contributors";
protected String CONTRIBUTOR = WORK_CONTRIBUTOR+"/contributor";
protected String CONTRIBUTOR_ORCID = "contributor-orcid";
protected String CREDIT_NAME = "credit-name";
protected String CONTRIBUTOR_EMAIL = "contributor-email";
protected String CONTRIBUTOR_ATTRIBUTES = "contributor-attributes";
protected String CONTRIBUTOR_SEQUENCE = "contributor-sequence";
protected String CONTRIBUTOR_ROLE = "contributor-role";
protected String WORK_SOURCE = "work-source";
public List<Work> convert(Document document) {
List<Work> result = new ArrayList<Work>();
if (XMLErrors.check(document)) {
try {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(document, ORCID_WORK);
while (iterator.hasNext()) {
Work work = convertWork(iterator.next());
result.add(work);
}
} catch (XPathExpressionException e) {
log.error("Error in xpath syntax", e);
}
} else {
processError(document);
}
return result;
}
protected Work convertWork(Node node) throws XPathExpressionException {
Work work = new Work();
setTitle(node, work);
setDescription(node, work);
setCitation(node, work);
setWorkType(node, work);
setPublicationDate(node, work);
setExternalIdentifiers(node, work);
setUrl(node, work);
setContributors(node, work);
setWorkSource(node, work);
return work;
}
protected void setWorkSource(Node node, Work work) throws XPathExpressionException {
String workSource = XMLUtils.getTextContent(node, WORK_SOURCE);
work.setWorkSource(workSource);
}
protected void setContributors(Node node, Work work) throws XPathExpressionException {
Set<Contributor> contributors = new HashSet<Contributor>();
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, CONTRIBUTOR);
while (iterator.hasNext()) {
Node nextContributorNode = iterator.next();
String orcid = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_ORCID);
String creditName = XMLUtils.getTextContent(nextContributorNode, CREDIT_NAME);
String email = XMLUtils.getTextContent(nextContributorNode, CONTRIBUTOR_EMAIL);
Set<ContributorAttribute> contributorAttributes = new HashSet<ContributorAttribute>();
NodeList attributeNodes = XMLUtils.getNodeList(nextContributorNode, CONTRIBUTOR_ATTRIBUTES);
Iterator<Node> attributesIterator = XMLUtils.getNodeListIterator(attributeNodes);
while (attributesIterator.hasNext()) {
Node nextAttribute = attributesIterator.next();
String roleText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_ROLE);
ContributorAttributeRole role = EnumUtils.lookup(ContributorAttributeRole.class, roleText);
String sequenceText = XMLUtils.getTextContent(nextAttribute, CONTRIBUTOR_SEQUENCE);
ContributorAttributeSequence sequence = EnumUtils.lookup(ContributorAttributeSequence.class, sequenceText);
ContributorAttribute attribute = new ContributorAttribute(role, sequence);
contributorAttributes.add(attribute);
}
Contributor contributor = new Contributor(orcid, creditName, email, contributorAttributes);
contributors.add(contributor);
}
work.setContributors(contributors);
}
protected void setUrl(Node node, Work work) throws XPathExpressionException {
String url = XMLUtils.getTextContent(node, URL);
work.setUrl(url);
}
protected void setExternalIdentifiers(Node node, Work work) throws XPathExpressionException {
Iterator<Node> iterator = XMLUtils.getNodeListIterator(node, WORK_EXTERNAL_IDENTIFIER);
while (iterator.hasNext()) {
Node work_external_identifier = iterator.next();
String typeText = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_TYPE);
WorkExternalIdentifierType type = EnumUtils.lookup(WorkExternalIdentifierType.class, typeText);
String id = XMLUtils.getTextContent(work_external_identifier, WORK_EXTERNAL_IDENTIFIER_ID);
WorkExternalIdentifier externalID = new WorkExternalIdentifier(type, id);
work.setWorkExternalIdentifier(externalID);
}
}
protected void setPublicationDate(Node node, Work work) throws XPathExpressionException {
String year = XMLUtils.getTextContent(node, YEAR);
String month = XMLUtils.getTextContent(node, MONTH);
String day = XMLUtils.getTextContent(node, DAY);
String publicationDate = year;
if (StringUtils.isNotBlank(month)) {
publicationDate += "-" + month;
if (StringUtils.isNotBlank(day)) {
publicationDate += "-" + day;
}
}
work.setPublicationDate(publicationDate);
}
protected void setWorkType(Node node, Work work) throws XPathExpressionException {
String workTypeText = XMLUtils.getTextContent(node, WORK_TYPE);
WorkType workType = EnumUtils.lookup(WorkType.class, workTypeText);
work.setWorkType(workType);
}
protected void setCitation(Node node, Work work) throws XPathExpressionException {
String typeText = XMLUtils.getTextContent(node, CITATION_TYPE);
CitationType type = EnumUtils.lookup(CitationType.class, typeText);
String citationtext = XMLUtils.getTextContent(node, CITATION);
Citation citation = new Citation(type, citationtext);
work.setCitation(citation);
}
protected void setDescription(Node node, Work work) throws XPathExpressionException {
String description = null;
description = XMLUtils.getTextContent(node, SHORT_DESCRIPTION);
work.setDescription(description);
}
protected void setTitle(Node node, Work work) throws XPathExpressionException {
String title = XMLUtils.getTextContent(node, TITLE);
String subtitle = XMLUtils.getTextContent(node, SUBTITLE);
Map<String, String> translatedTitles = new HashMap<String, String>();
NodeList nodeList = XMLUtils.getNodeList(node, TRANSLATED_TITLES);
Iterator<Node> iterator = XMLUtils.getNodeListIterator(nodeList);
while (iterator.hasNext()) {
Node languageNode = iterator.next();
String language = XMLUtils.getTextContent(languageNode, TRANSLATED_TITLES_LANGUAGE);
String translated_title = XMLUtils.getTextContent(languageNode, ".");
translatedTitles.put(language, translated_title);
}
WorkTitle workTitle = new WorkTitle(title, subtitle, translatedTitles);
work.setWorkTitle(workTitle);
}
}

View File

@@ -7,13 +7,12 @@
*/
package org.dspace.authority.rest;
import org.apache.http.impl.client.HttpClientBuilder;
import org.dspace.authority.util.XMLUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import java.io.InputStream;
import java.util.Scanner;
@@ -38,26 +37,28 @@ public class RESTConnector {
this.url = url;
}
public Document get(String path) {
Document document = null;
public InputStream get(String path, String accessToken) {
InputStream result = null;
path = trimSlashes(path);
String fullPath = url + '/' + path;
HttpGet httpGet = new HttpGet(fullPath);
if(StringUtils.isNotBlank(accessToken)){
httpGet.addHeader("Content-Type", "application/vnd.orcid+xml");
httpGet.addHeader("Authorization","Bearer "+accessToken);
}
try {
HttpClient httpClient = HttpClientBuilder.create().build();
HttpResponse getResponse = httpClient.execute(httpGet);
//do not close this httpClient
result = getResponse.getEntity().getContent();
document = XMLUtils.convertStreamToXML(result);
} catch (Exception e) {
getGotError(e, fullPath);
}
return document;
return result;
}
protected void getGotError(Exception e, String fullPath) {

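A minimal caller sketch for the reworked get() above. The ORCID path segment, token, and helper class are illustrative assumptions; the connector only adds the ORCID media type and Bearer header when a non-blank token is supplied, and the caller is now responsible for parsing the returned stream (for example with the XMLUtils.convertStreamToXML helper that the old version called internally):

import java.io.InputStream;

import org.dspace.authority.util.XMLUtils;
import org.w3c.dom.Document;

public class RestConnectorUsageExample {
    /** Hypothetical helper: fetch a member's works and parse the response stream. */
    public static Document fetchWorks(RESTConnector connector, String orcid, String accessToken)
            throws Exception {
        InputStream response = connector.get(orcid + "/orcid-works", accessToken);
        return XMLUtils.convertStreamToXML(response);
    }
}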
View File

@@ -7,9 +7,7 @@
*/
package org.dspace.authority.rest;
import org.dspace.authority.AuthorityValue;
import java.util.List;
import org.dspace.authority.SolrAuthorityInterface;
/**
*
@@ -18,21 +16,11 @@ import java.util.List;
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public abstract class RestSource {
public abstract class RestSource implements SolrAuthorityInterface {
protected RESTConnector restConnector;
public RestSource(String url) {
this.restConnector = new RESTConnector(url);
}
/**
* TODO
* com.atmire.org.dspace.authority.rest.RestSource#queryAuthorities -> add field, so the source can decide whether to query /users or something else.
* -> implement subclasses
* -> implement usages
*/
public abstract List<AuthorityValue> queryAuthorities(String text, int max);
public abstract AuthorityValue queryAuthorityID(String id);
}

View File

@@ -20,6 +20,7 @@ import org.dspace.eperson.Group;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.workflow.WorkflowItem;
/**
* AuthorizeManager handles all authorization checks for DSpace. For better
@@ -295,8 +296,43 @@ public class AuthorizeManager
}
}
// In case the dso is a bundle or bitstream, we must ignore custom
// policies if it does not belong to at least one installed item (see
// DS-2614).
// In case the dso is an item and a corresponding workspace or workflow
// item exists, we have to ignore custom policies (see DS-2614).
boolean ignoreCustomPolicies = false;
if (o instanceof Bitstream)
{
Bitstream b = (Bitstream) o;
// Ensure that this is not a collection or community logo
DSpaceObject parent = b.getParentObject();
if (!(parent instanceof Collection) && !(parent instanceof Community))
{
ignoreCustomPolicies = !isAnyItemInstalled(c, b.getBundles());
}
}
if (o instanceof Bundle)
{
ignoreCustomPolicies = !isAnyItemInstalled(c, new Bundle[] {(Bundle) o});
}
if (o instanceof Item)
{
if (WorkspaceItem.findByItem(c, (Item) o) != null ||
WorkflowItem.findByItem(c, (Item) o) != null)
{
ignoreCustomPolicies = true;
}
}
for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action))
{
if (ignoreCustomPolicies
&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
{
continue;
}
// check policies for date validity
if (rp.isDateValid())
{
@@ -306,7 +342,7 @@ public class AuthorizeManager
}
if ((rp.getGroupID() != -1)
&& (Group.isMember(c, rp.getGroupID())))
&& (Group.isMember(c, e, rp.getGroupID())))
{
// group was set, and eperson is a member
// of that group
@@ -318,7 +354,26 @@ public class AuthorizeManager
// default authorization is denial
return false;
}
// check whether any bundle belongs to an item that has passed the
// submission and workflow process
protected static boolean isAnyItemInstalled(Context ctx, Bundle[] bundles)
throws SQLException
{
for (Bundle bundle : bundles)
{
for (Item item : bundle.getItems())
{
if (WorkspaceItem.findByItem(ctx, item) == null
&& WorkflowItem.findByItem(ctx, item) == null)
{
return true;
}
}
}
return false;
}
///////////////////////////////////////////////
// admin check methods
///////////////////////////////////////////////
@@ -480,7 +535,9 @@ public class AuthorizeManager
rp.update();
c.turnOffAuthorisationSystem();
o.updateLastModified();
c.restoreAuthSystemState();
}
/**
@@ -534,8 +591,10 @@ public class AuthorizeManager
rp.setRpType(type);
rp.update();
c.turnOffAuthorisationSystem();
o.updateLastModified();
c.restoreAuthSystemState();
}
/**
@@ -799,7 +858,9 @@ public class AuthorizeManager
drp.update();
}
c.turnOffAuthorisationSystem();
dest.updateLastModified();
c.restoreAuthSystemState();
}
/**
@@ -815,12 +876,14 @@ public class AuthorizeManager
public static void removeAllPolicies(Context c, DSpaceObject o)
throws SQLException
{
o.updateLastModified();
// FIXME: authorization check?
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
+ "resource_type_id= ? AND resource_id= ? ",
o.getType(), o.getID());
c.turnOffAuthorisationSystem();
o.updateLastModified();
c.restoreAuthSystemState();
}
/**
@@ -837,7 +900,7 @@ public class AuthorizeManager
throws SQLException
{
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
+ "resource_type_id= ? AND resource_id= ? AND rptype <> ? ",
+ "resource_type_id= ? AND resource_id= ? AND (rptype <> ? OR rptype IS NULL)",
o.getType(), o.getID(), type);
}
@@ -861,6 +924,29 @@ public class AuthorizeManager
+ "resource_type_id= ? AND resource_id= ? AND rptype=? ",
o.getType(), o.getID(), type);
}
/**
* Change all policies on the specified object that carry one action
* (fromAction) so that they carry another action (toAction).
*
* @param context
* the DSpace context
* @param dso
* the DSpace object whose policies are switched
* @param fromAction
* the action to change from
* @param toAction
* the new action to set
* @throws SQLException
* @throws AuthorizeException
*/
public static void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
throws SQLException, AuthorizeException {
List<ResourcePolicy> rps = getPoliciesActionFilter(context, dso, fromAction);
for (ResourcePolicy rp : rps) {
rp.setAction(toAction);
rp.update();
}
}
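A minimal usage sketch of switchPoliciesAction(), mirroring what the Item.withdraw() change later in this set does; the wrapper class and method are hypothetical, the calls are taken from the code in this changeset:

import java.sql.SQLException;

import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;

public class WithdrawnReadExample {
    /** Flip READ policies to WITHDRAWN_READ on an item, its bundles and bitstreams. */
    public static void hideWithdrawnItem(Context context, Item item)
            throws SQLException, AuthorizeException {
        AuthorizeManager.switchPoliciesAction(context, item, Constants.READ, Constants.WITHDRAWN_READ);
        for (Bundle bundle : item.getBundles()) {
            AuthorizeManager.switchPoliciesAction(context, bundle, Constants.READ, Constants.WITHDRAWN_READ);
            for (Bitstream bitstream : bundle.getBitstreams()) {
                AuthorizeManager.switchPoliciesAction(context, bitstream, Constants.READ, Constants.WITHDRAWN_READ);
            }
        }
    }
}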
/**
* Remove all policies from an object that match a given action. FIXME
@@ -879,7 +965,6 @@ public class AuthorizeManager
public static void removePoliciesActionFilter(Context context,
DSpaceObject dso, int actionID) throws SQLException
{
dso.updateLastModified();
if (actionID == -1)
{
// remove all policies from object
@@ -891,6 +976,10 @@ public class AuthorizeManager
"resource_id= ? AND action_id= ? ",
dso.getType(), dso.getID(), actionID);
}
context.turnOffAuthorisationSystem();
dso.updateLastModified();
context.restoreAuthSystemState();
}
/**
@@ -927,11 +1016,13 @@ public class AuthorizeManager
public static void removeGroupPolicies(Context c, DSpaceObject o, Group g)
throws SQLException
{
o.updateLastModified();
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
+ "resource_type_id= ? AND resource_id= ? AND epersongroup_id= ? ",
o.getType(), o.getID(), g.getID());
c.turnOffAuthorisationSystem();
o.updateLastModified();
c.restoreAuthSystemState();
}
/**
@@ -950,10 +1041,13 @@ public class AuthorizeManager
public static void removeEPersonPolicies(Context c, DSpaceObject o, EPerson e)
throws SQLException
{
o.updateLastModified();
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
+ "resource_type_id= ? AND resource_id= ? AND eperson_id= ? ",
o.getType(), o.getID(), e.getID());
c.turnOffAuthorisationSystem();
o.updateLastModified();
c.restoreAuthSystemState();
}
/**

View File

@@ -36,7 +36,7 @@ public class FixDefaultPolicies
Context c = new Context();
// turn off authorization
c.setIgnoreAuthorization(true);
c.turnOffAuthorisationSystem();
//////////////////////
// carnage begins here

View File

@@ -63,7 +63,7 @@ public class PolicySet
Context c = new Context();
// turn off authorization
c.setIgnoreAuthorization(true);
c.turnOffAuthorisationSystem();
//////////////////////
// carnage begins here

View File

@@ -354,7 +354,7 @@ public class BrowserScope
*/
public void setResultsPerPage(int resultsPerPage)
{
if (resultsPerPage > -1 || browseIndex.isTagCloudEnabled())
if (resultsPerPage > -1 || (browseIndex != null && browseIndex.isTagCloudEnabled()))
{
this.resultsPerPage = resultsPerPage;
}

View File

@@ -32,6 +32,8 @@ import org.dspace.utils.DSpace;
*
* @author Andrea Bollini (CILEA)
* @author Adán Román Ruiz at arvo.es (bugfix)
* @author Panagiotis Koutsourakis (National Documentation Centre) (bugfix)
* @author Kostas Stamatis (National Documentation Centre) (bugfix)
*
*/
public class SolrBrowseDAO implements BrowseDAO
@@ -336,6 +338,22 @@ public class SolrBrowseDAO implements BrowseDAO
addStatusFilter(query);
query.setMaxResults(0);
query.addFilterQueries("search.resourcetype:" + Constants.ITEM);
// We need to take into account the fact that we may be in a subset of the items
if (authority != null)
{
query.addFilterQueries("{!field f="+facetField + "_authority_filter}"
+ authority);
}
else if (this.value != null && !valuePartial)
{
query.addFilterQueries("{!field f="+facetField + "_value_filter}" + this.value);
}
else if (valuePartial)
{
query.addFilterQueries("{!field f="+facetField + "_partial}" + this.value);
}
if (isAscending)
{
query.setQuery("bi_"+column + "_sort" + ": [* TO \"" + value + "\"}");
@@ -343,6 +361,7 @@ public class SolrBrowseDAO implements BrowseDAO
else
{
query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]");
query.addFilterQueries("-(bi_" + column + "_sort" + ":" + value + "*)");
}
boolean includeUnDiscoverable = itemsWithdrawn || !itemsDiscoverable;
DiscoverResult resp = null;

View File

@@ -28,6 +28,7 @@ import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import java.io.Serializable;
import java.io.IOException;
import java.io.InputStream;
import java.sql.PreparedStatement;
@@ -47,7 +48,6 @@ import java.util.*;
* effect.
*
* @author Robert Tansley
* @version $Revision$
*/
public class Collection extends DSpaceObject
{
@@ -294,31 +294,48 @@ public class Collection extends DSpaceObject
* @return the collections in the system
* @throws SQLException
*/
public static Collection[] findAll(Context context) throws SQLException {
public static Collection[] findAll(Context context) throws SQLException
{
TableRowIterator tri = null;
try {
String query = "SELECT c.* FROM collection c " +
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
if(DatabaseManager.isOracle()){
query += " ORDER BY cast(m.text_value as varchar2(128))";
}else{
query += " ORDER BY m.text_value";
}
List<Collection> collections = null;
List<Serializable> params = new ArrayList<Serializable>();
StringBuffer query = new StringBuffer(
"SELECT c.*" +
"FROM collection c " +
"LEFT JOIN metadatavalue m ON (" +
"m.resource_id = c.collection_id AND " +
"m.resource_type_id = ? AND " +
"m.metadata_field_id = ?" +
")"
);
tri = DatabaseManager.query(context,
query,
Constants.COLLECTION,
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID()
);
} catch (SQLException e) {
log.error("Find all Collections - ",e);
throw e;
if (DatabaseManager.isOracle())
{
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
}
else
{
query.append(" ORDER BY m.text_value");
}
List<Collection> collections = new ArrayList<Collection>();
params.add(Constants.COLLECTION);
params.add(
MetadataField.findByElement(
context,
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
"title",
null
).getFieldID()
);
try
{
tri = DatabaseManager.query(
context, query.toString(), params.toArray()
);
collections = new ArrayList<Collection>();
while (tri.hasNext())
{
TableRow row = tri.next();
@@ -337,6 +354,11 @@ public class Collection extends DSpaceObject
}
}
}
catch (SQLException e)
{
log.error("Find all Collections - ", e);
throw e;
}
finally
{
// close the TableRowIterator to free up resources
@@ -363,31 +385,47 @@ public class Collection extends DSpaceObject
public static Collection[] findAll(Context context, Integer limit, Integer offset) throws SQLException
{
TableRowIterator tri = null;
try{
String query = "SELECT c.* FROM collection c " +
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) ";
List<Collection> collections = null;
List<Serializable> params = new ArrayList<Serializable>();
StringBuffer query = new StringBuffer(
"SELECT c.*" +
"FROM collection c " +
"LEFT JOIN metadatavalue m ON (" +
"m.resource_id = c.collection_id AND " +
"m.resource_type_id = ? AND " +
"m.metadata_field_id = ?" +
")"
);
if(DatabaseManager.isOracle()){
query += " ORDER BY cast(m.text_value as varchar2(128))";
}else{
query += " ORDER BY m.text_value";
}
query += " limit ? offset ?";
tri = DatabaseManager.query(context,
query,
Constants.COLLECTION,
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID(),
limit,
offset
);
} catch (SQLException e) {
log.error("Find all Collections offset/limit - ",e);
throw e;
if (DatabaseManager.isOracle())
{
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
}
List<Collection> collections = new ArrayList<Collection>();
else
{
query.append(" ORDER BY m.text_value");
}
params.add(Constants.COLLECTION);
params.add(
MetadataField.findByElement(
context,
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
"title",
null
).getFieldID()
);
DatabaseManager.applyOffsetAndLimit(query, params, offset, limit);
try
{
tri = DatabaseManager.query(
context, query.toString(), params.toArray()
);
collections = new ArrayList<Collection>();
while (tri.hasNext())
{
TableRow row = tri.next();
@@ -406,6 +444,11 @@ public class Collection extends DSpaceObject
}
}
}
catch (SQLException e)
{
log.error("Find all Collections offset/limit - ", e);
throw e;
}
finally
{
// close the TableRowIterator to free up resources
@@ -450,13 +493,20 @@ public class Collection extends DSpaceObject
*/
public ItemIterator getItems(Integer limit, Integer offset) throws SQLException
{
String myQuery = "SELECT item.* FROM item, collection2item WHERE "
+ "item.item_id=collection2item.item_id AND "
+ "collection2item.collection_id= ? "
+ "AND item.in_archive='1' limit ? offset ?";
List<Serializable> params = new ArrayList<Serializable>();
StringBuffer myQuery = new StringBuffer(
"SELECT item.* " +
"FROM item, collection2item " +
"WHERE item.item_id = collection2item.item_id " +
"AND collection2item.collection_id = ? " +
"AND item.in_archive = '1'"
);
TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item",
myQuery,getID(), limit, offset);
params.add(getID());
DatabaseManager.applyOffsetAndLimit(myQuery, params, offset, limit);
TableRowIterator rows = DatabaseManager.query(ourContext,
myQuery.toString(), params.toArray());
return new ItemIterator(ourContext, rows);
}
@@ -679,8 +729,6 @@ public class Collection extends DSpaceObject
g.setName("COLLECTION_" + getID() + "_WORKFLOW_STEP_" + step);
g.update();
setWorkflowGroup(step, g);
AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, g);
}
return workflowGroup[step - 1];
@@ -689,26 +737,82 @@ public class Collection extends DSpaceObject
/**
* Set the workflow group corresponding to a particular workflow step.
* <code>null</code> can be passed in if there should be no associated
* group for that workflow step; any existing group is NOT deleted.
* group for that workflow step. Any existing group is NOT deleted.
*
* @param step
* the workflow step (1-3)
* @param g
* @param newGroup
* the new workflow group, or <code>null</code>
* @throws java.sql.SQLException passed through.
* @throws org.dspace.authorize.AuthorizeException passed through.
*/
public void setWorkflowGroup(int step, Group g)
public void setWorkflowGroup(int step, Group newGroup)
throws SQLException, AuthorizeException
{
workflowGroup[step - 1] = g;
if (g == null)
Group oldGroup = getWorkflowGroup(step);
String stepColumn;
int action;
switch(step)
{
collectionRow.setColumnNull("workflow_step_" + step);
case 1:
action = Constants.WORKFLOW_STEP_1;
stepColumn = "workflow_step_1";
break;
case 2:
action = Constants.WORKFLOW_STEP_2;
stepColumn = "workflow_step_2";
break;
case 3:
action = Constants.WORKFLOW_STEP_3;
stepColumn = "workflow_step_3";
break;
default:
throw new IllegalArgumentException("Illegal step count: " + step);
}
workflowGroup[step-1] = newGroup;
if (newGroup != null)
collectionRow.setColumn(stepColumn, newGroup.getID());
else
{
collectionRow.setColumn("workflow_step_" + step, g.getID());
}
collectionRow.setColumnNull(stepColumn);
modified = true;
// Deal with permissions.
try {
ourContext.turnOffAuthorisationSystem();
// remove the policies for the old group
if (oldGroup != null)
{
List<ResourcePolicy> oldPolicies = AuthorizeManager
.getPoliciesActionFilter(ourContext, this, action);
int oldGroupID = oldGroup.getID();
for (ResourcePolicy rp : oldPolicies)
{
if (rp.getGroupID() == oldGroupID)
rp.delete();
}
oldPolicies = AuthorizeManager
.getPoliciesActionFilter(ourContext, this, Constants.ADD);
for (ResourcePolicy rp : oldPolicies)
{
if ((rp.getGroupID() == oldGroupID)
&& ResourcePolicy.TYPE_WORKFLOW.equals(rp.getRpType()))
rp.delete();
}
}
// New group can be null to delete workflow step.
// We need to grant permissions if new group is not null.
if (newGroup != null)
{
AuthorizeManager.addPolicy(ourContext, this, action, newGroup,
ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(ourContext, this, Constants.ADD, newGroup,
ResourcePolicy.TYPE_WORKFLOW);
}
} finally {
ourContext.restoreAuthSystemState();
}
}
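A minimal usage sketch of the reworked setWorkflowGroup() (hypothetical collection and group; persisting with update() is left to the caller as usual). Assigning a step-2 group now also grants it the WORKFLOW_STEP_2 and ADD policies of type TYPE_WORKFLOW, any policies held by the previous step-2 group are deleted, and passing null simply clears the step:

import java.sql.SQLException;

import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.eperson.Group;

public class WorkflowGroupExample {
    /** Hypothetical helper: swap in a new review (step 2) group for a collection. */
    public static void assignStep2Reviewers(Collection collection, Group reviewers)
            throws SQLException, AuthorizeException {
        // Old step-2 policies are removed, new WORKFLOW_STEP_2/ADD policies are added.
        collection.setWorkflowGroup(2, reviewers);
    }
}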
/**
@@ -1513,7 +1617,7 @@ public class Collection extends DSpaceObject
public static Collection[] findAuthorizedOptimized(Context context, int actionID) throws java.sql.SQLException
{
if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", true)) {
if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false)) {
// Fallback to legacy query if config says so. The rationale could be that a site found a bug.
return findAuthorized(context, null, actionID);
}

View File

@@ -280,7 +280,7 @@ public class Community extends DSpaceObject
{
while (tri.hasNext())
{
TableRow row = tri.next();
TableRow row = tri.next(context);
// First check the cache
Community fromCache = (Community) context.fromCache(
@@ -350,7 +350,7 @@ public class Community extends DSpaceObject
{
while (tri.hasNext())
{
TableRow row = tri.next();
TableRow row = tri.next(context);
// First check the cache
Community fromCache = (Community) context.fromCache(
@@ -683,7 +683,7 @@ public class Community extends DSpaceObject
{
while (tri.hasNext())
{
TableRow row = tri.next();
TableRow row = tri.next(ourContext);
// First check the cache
Collection fromCache = (Collection) ourContext.fromCache(
@@ -757,7 +757,7 @@ public class Community extends DSpaceObject
{
while (tri.hasNext())
{
TableRow row = tri.next();
TableRow row = tri.next(ourContext);
// First check the cache
Community fromCache = (Community) ourContext.fromCache(
@@ -812,7 +812,7 @@ public class Community extends DSpaceObject
{
if (tri.hasNext())
{
TableRow row = tri.next();
TableRow row = tri.next(ourContext);
// First check the cache
Community fromCache = (Community) ourContext.fromCache(

View File

@@ -60,6 +60,8 @@ public class InstallItem
IOException, AuthorizeException
{
Item item = is.getItem();
Collection collection = is.getCollection();
IdentifierService identifierService = new DSpace().getSingletonService(IdentifierService.class);
try {
if(suppliedHandle == null)
@@ -74,7 +76,15 @@ public class InstallItem
populateMetadata(c, item);
return finishItem(c, item, is);
// Finish up / archive the item
item = finishItem(c, item, is);
// As this is a BRAND NEW item, as a final step we need to remove the
// submitter item policies created during deposit and replace them with
// the default policies from the collection.
item.inheritCollectionDefaultPolicies(collection);
return item;
}
/**
@@ -205,8 +215,18 @@ public class InstallItem
item.addDC("description", "provenance", "en", provDescription);
}
// final housekeeping when adding new Item to archive
// common between installing and "restoring" items.
/**
* Final housekeeping when adding a new Item into the archive.
* This method is used by *both* installItem() and restoreItem(),
* so all actions here will be run for a newly added item or a restored item.
*
* @param c DSpace Context
* @param item Item in question
* @param is InProgressSubmission object
* @return final "archived" Item
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
private static Item finishItem(Context c, Item item, InProgressSubmission is)
throws SQLException, IOException, AuthorizeException
{
@@ -229,10 +249,6 @@ public class InstallItem
// remove in-progress submission
is.deleteWrapper();
// remove the item's policies and replace them with
// the defaults from the collection
item.inheritCollectionDefaultPolicies(is.getCollection());
// set embargo lift date and take away read access if indicated.
EmbargoManager.setEmbargo(c, item);

View File

@@ -24,14 +24,15 @@ import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.browse.BrowseException;
import org.dspace.browse.IndexBrowse;
import org.dspace.content.authority.ChoiceAuthorityManager;
import org.dspace.content.authority.Choices;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.content.authority.Choices;
import org.dspace.content.authority.ChoiceAuthorityManager;
import org.dspace.event.Event;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.event.Event;
import org.dspace.handle.HandleManager;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.IdentifierService;
@@ -40,6 +41,8 @@ import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.utils.DSpace;
import org.dspace.versioning.VersioningService;
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
/**
* Class representing an item in DSpace.
@@ -263,7 +266,7 @@ public class Item extends DSpaceObject
}
String query = "SELECT item.* FROM metadatavalue,item WHERE item.in_archive='1' " +
"AND item.item_id = metadatavalue.item_id AND metadata_field_id = ?";
"AND item.item_id = metadatavalue.resource_id AND metadatavalue.resource_type_id=2 AND metadata_field_id = ?";
TableRowIterator rows = null;
if (Item.ANY.equals(authority)) {
rows = DatabaseManager.queryTable(context, "item", query, mdf.getFieldID());
@@ -1158,8 +1161,14 @@ public class Item extends DSpaceObject
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
"WITHDRAW", getIdentifiers(ourContext)));
// remove all authorization policies, saving the custom ones
AuthorizeManager.removeAllPoliciesByDSOAndTypeNotEqualsTo(ourContext, this, ResourcePolicy.TYPE_CUSTOM);
// switch all READ authorization policies to WITHDRAWN_READ
AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.READ, Constants.WITHDRAWN_READ);
for (Bundle bnd : this.getBundles()) {
AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.READ, Constants.WITHDRAWN_READ);
for (Bitstream bs : bnd.getBitstreams()) {
AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.READ, Constants.WITHDRAWN_READ);
}
}
// Write log
log.info(LogManager.getHeader(ourContext, "withdraw_item", "user="
@@ -1217,16 +1226,28 @@ public class Item extends DSpaceObject
ourContext.addEvent(new Event(Event.MODIFY, Constants.ITEM, getID(),
"REINSTATE", getIdentifiers(ourContext)));
// authorization policies
if (colls.length > 0)
{
// FIXME: not multiple inclusion friendly - just apply access
// policies from first collection
// remove the item's policies and replace them with
// the defaults from the collection
inheritCollectionDefaultPolicies(colls[0]);
// restore all WITHDRAWN_READ authorization policies back to READ
for (Bundle bnd : this.getBundles()) {
AuthorizeManager.switchPoliciesAction(ourContext, bnd, Constants.WITHDRAWN_READ, Constants.READ);
for (Bitstream bs : bnd.getBitstreams()) {
AuthorizeManager.switchPoliciesAction(ourContext, bs, Constants.WITHDRAWN_READ, Constants.READ);
}
}
// check if the item was withdrawn before the fix DS-3097
if (AuthorizeManager.getPoliciesActionFilter(ourContext, this, Constants.WITHDRAWN_READ).size() != 0) {
AuthorizeManager.switchPoliciesAction(ourContext, this, Constants.WITHDRAWN_READ, Constants.READ);
}
else {
// authorization policies
if (colls.length > 0)
{
// remove the item's policies and replace them with
// the defaults from the collection
adjustItemPolicies(getOwningCollection());
}
}
// Write log
log.info(LogManager.getHeader(ourContext, "reinstate_item", "user="
+ e.getEmail() + ",item_id=" + getID()));
@@ -1254,9 +1275,6 @@ public class Item extends DSpaceObject
log.info(LogManager.getHeader(ourContext, "delete_item", "item_id="
+ getID()));
// Remove from cache
ourContext.removeCached(this, getID());
// Remove from browse indices, if appropriate
/** XXX FIXME
** Although all other Browse index updates are managed through
@@ -1303,6 +1321,8 @@ public class Item extends DSpaceObject
// remove version attached to the item
removeVersion();
// Remove from cache
ourContext.removeCached(this, getID());
// Finally remove item row
DatabaseManager.delete(ourContext, itemRow);
@@ -1750,7 +1770,12 @@ public class Item extends DSpaceObject
// is this collection not yet created, and an item template is created
if (getOwningCollection() == null)
{
return true;
return !isInProgressSubmission();
}
// is this person an COLLECTION_EDITOR for the owning collection?
@@ -1762,6 +1787,20 @@ public class Item extends DSpaceObject
return false;
}
/**
* Check if the item is an in-progress submission.
*
* @return <code>true</code> if the item is an in-progress submission, i.e. a WorkspaceItem or WorkflowItem
* @throws SQLException if a database error occurs
*/
public boolean isInProgressSubmission() throws SQLException {
return WorkspaceItem.findByItem(ourContext, this) != null ||
((ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow")
&& XmlWorkflowItem.findByItem(ourContext, this) != null)
|| WorkflowItem.findByItem(ourContext, this) != null);
}
public String getName()
{
return getMetadataFirstValue(MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);

View File

@@ -81,6 +81,7 @@ public class WorkspaceItem implements InProgressSubmission
* ID of the workspace item
*
* @return the workspace item, or null if the ID is invalid.
* @throws java.sql.SQLException passed through.
*/
public static WorkspaceItem find(Context context, int id)
throws SQLException
@@ -131,6 +132,9 @@ public class WorkspaceItem implements InProgressSubmission
* of the collection's template item
*
* @return the newly created workspace item
* @throws org.dspace.authorize.AuthorizeException passed through.
* @throws java.sql.SQLException passed through.
* @throws java.io.IOException passed through.
*/
public static WorkspaceItem create(Context c, Collection coll,
boolean template) throws AuthorizeException, SQLException,
@@ -140,98 +144,49 @@ public class WorkspaceItem implements InProgressSubmission
AuthorizeManager.authorizeAction(c, coll, Constants.ADD);
// Create an item
Item i = Item.create(c);
i.setSubmitter(c.getCurrentUser());
Item item = Item.create(c);
item.setSubmitter(c.getCurrentUser());
// Now create the policies for the submitter and workflow
// users to modify item and contents
// Now create the policies for the submitter to modify item and contents.
// contents = bitstreams, bundles
// FIXME: icky hardcoded workflow steps
Group step1group = coll.getWorkflowGroup(1);
Group step2group = coll.getWorkflowGroup(2);
Group step3group = coll.getWorkflowGroup(3);
EPerson e = c.getCurrentUser();
EPerson submitter = c.getCurrentUser();
// read permission
AuthorizeManager.addPolicy(c, i, Constants.READ, e, ResourcePolicy.TYPE_SUBMISSION);
// Add policies for the submitter
AuthorizeManager.addPolicy(c, item, Constants.READ, submitter, ResourcePolicy.TYPE_SUBMISSION);
AuthorizeManager.addPolicy(c, item, Constants.WRITE, submitter, ResourcePolicy.TYPE_SUBMISSION);
AuthorizeManager.addPolicy(c, item, Constants.ADD, submitter, ResourcePolicy.TYPE_SUBMISSION);
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, submitter, ResourcePolicy.TYPE_SUBMISSION);
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow"))
{
// Add policies for the workflow step administrative groups
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.READ, step1group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.READ, step1group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.WRITE, step1group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.ADD, step1group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step1group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.READ, step2group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.READ, step2group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.WRITE, step2group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.ADD, step2group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step2group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.READ, step3group, ResourcePolicy.TYPE_WORKFLOW);
}
}
// write permission
AuthorizeManager.addPolicy(c, i, Constants.WRITE, e, ResourcePolicy.TYPE_SUBMISSION);
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step1group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step2group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.WRITE, step3group, ResourcePolicy.TYPE_WORKFLOW);
}
}
// add permission
AuthorizeManager.addPolicy(c, i, Constants.ADD, e, ResourcePolicy.TYPE_SUBMISSION);
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.ADD, step1group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.ADD, step2group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.ADD, step3group, ResourcePolicy.TYPE_WORKFLOW);
}
}
// remove contents permission
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, e, ResourcePolicy.TYPE_SUBMISSION);
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("originalworkflow")) {
if (step1group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step1group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step2group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step2group, ResourcePolicy.TYPE_WORKFLOW);
}
if (step3group != null)
{
AuthorizeManager.addPolicy(c, i, Constants.REMOVE, step3group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.READ, step3group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.WRITE, step3group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.ADD, step3group, ResourcePolicy.TYPE_WORKFLOW);
AuthorizeManager.addPolicy(c, item, Constants.REMOVE, step3group, ResourcePolicy.TYPE_WORKFLOW);
}
}
@@ -244,22 +199,22 @@ public class WorkspaceItem implements InProgressSubmission
for (int n = 0; n < md.length; n++)
{
i.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
item.addMetadata(md[n].schema, md[n].element, md[n].qualifier, md[n].language,
md[n].value);
}
}
i.update();
item.update();
// Create the workspace item row
TableRow row = DatabaseManager.row("workspaceitem");
row.setColumn("item_id", i.getID());
row.setColumn("item_id", item.getID());
row.setColumn("collection_id", coll.getID());
log.info(LogManager.getHeader(c, "create_workspace_item",
"workspace_item_id=" + row.getIntColumn("workspace_item_id")
+ "item_id=" + i.getID() + "collection_id="
+ "item_id=" + item.getID() + "collection_id="
+ coll.getID()));
DatabaseManager.insert(c, row);
@@ -280,6 +235,7 @@ public class WorkspaceItem implements InProgressSubmission
* the eperson
*
* @return the corresponding workspace items
* @throws java.sql.SQLException passed through.
*/
public static WorkspaceItem[] findByEPerson(Context context, EPerson ep)
throws SQLException
@@ -332,6 +288,7 @@ public class WorkspaceItem implements InProgressSubmission
* the collection
*
* @return the corresponding workspace items
* @throws java.sql.SQLException passed through.
*/
public static WorkspaceItem[] findByCollection(Context context, Collection c)
throws SQLException
@@ -384,6 +341,7 @@ public class WorkspaceItem implements InProgressSubmission
* the item
*
* @return workflow item corresponding to the item, or null
* @throws java.sql.SQLException passed through.
*/
public static WorkspaceItem findByItem(Context context, Item i)
throws SQLException
@@ -408,6 +366,7 @@ public class WorkspaceItem implements InProgressSubmission
* @param context the context object
*
* @return all workspace items
* @throws java.sql.SQLException passed through.
*/
public static WorkspaceItem[] findAll(Context context)
throws SQLException
@@ -505,6 +464,8 @@ public class WorkspaceItem implements InProgressSubmission
/**
* Update the workspace item, including the unarchived item.
* @throws java.sql.SQLException passed through.
* @throws org.dspace.authorize.AuthorizeException passed through.
*/
public void update() throws SQLException, AuthorizeException
{
@@ -554,6 +515,10 @@ public class WorkspaceItem implements InProgressSubmission
* Delete the workspace item. The entry in workspaceitem, the unarchived
* item and its contents are all removed (multiple inclusion
* notwithstanding.)
* @throws java.sql.SQLException passed through.
* @throws org.dspace.authorize.AuthorizeException
* if not original submitter or an administrator.
* @throws java.io.IOException passed through.
*/
public void deleteAll() throws SQLException, AuthorizeException,
IOException

View File

@@ -7,9 +7,6 @@
*/
package org.dspace.content.authority;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.rest.RestSource;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
@@ -17,6 +14,9 @@ import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.CommonParams;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.core.ConfigurationManager;
import org.dspace.utils.DSpace;
@@ -35,7 +35,7 @@ import java.util.Map;
public class SolrAuthority implements ChoiceAuthority {
private static final Logger log = Logger.getLogger(SolrAuthority.class);
private RestSource source = new DSpace().getServiceManager().getServiceByName("AuthoritySource", RestSource.class);
protected SolrAuthorityInterface source = new DSpace().getServiceManager().getServiceByName("AuthoritySource", SolrAuthorityInterface.class);
private boolean externalResults = false;
public Choices getMatches(String field, String text, int collection, int start, int limit, String locale, boolean bestMatch) {

View File

@@ -0,0 +1,44 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.crosswalk;
import org.dspace.core.Context;
import java.sql.SQLException;
/**
* Created by jonas - jonas@atmire.com on 21/04/17.
* Partial implementation of the {@link DisseminationCrosswalk} interface that allows a Context to be set manually.
*/
public abstract class ContextAwareDisseminationCrosswalk implements DisseminationCrosswalk{
private Context context;
private boolean contextCreatedInternally = false;
public void setContext(Context context){
this.context = context;
}
public Context getContext() throws SQLException {
if(context == null|| !context.isValid()){
context=new Context();
contextCreatedInternally = true;
}
return context;
}
public void handleContextCleanup() throws SQLException {
if(contextCreatedInternally){
context.complete();
}else{
context.commit();
}
}
}
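A minimal caller sketch for the new ContextAwareDisseminationCrosswalk contract (the helper class is hypothetical): set an existing Context before disseminating, then let handleContextCleanup() either commit the caller's context or complete the one the crosswalk created internally:

import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.jdom.Element;

public class ContextAwareCrosswalkExample {
    /** Disseminate using the caller's context instead of letting the crosswalk open a new one. */
    public static Element disseminateWith(ContextAwareDisseminationCrosswalk crosswalk,
            Context context, DSpaceObject dso) throws Exception {
        crosswalk.setContext(context);
        Element result = crosswalk.disseminateElement(dso);
        crosswalk.handleContextCleanup();
        return result;
    }
}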

View File

@@ -27,7 +27,10 @@ import org.dspace.license.CreativeCommons;
*
* @author Larry Stone
* @version $Revision: 1.0 $
*
* @deprecated To keep the JSPUI and XMLUI approaches uniform, the bitstream holding the license in textual format is no longer stored; see https://jira.duraspace.org/browse/DS-2604
*/
@Deprecated
public class CreativeCommonsTextStreamDisseminationCrosswalk
implements StreamDisseminationCrosswalk
{

View File

@@ -7,15 +7,15 @@
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.jdom.Element;
import org.jdom.Namespace;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
/**
* Dissemination Crosswalk plugin -- translate DSpace native
* metadata into an external XML format.
@@ -107,6 +107,9 @@ public interface DisseminationCrosswalk
* Execute crosswalk, returning one XML root element as
* a JDOM <code>Element</code> object.
* This is typically the root element of a document.
* Note that if the implementing class is a {@link org.dspace.content.crosswalk.ContextAwareDisseminationCrosswalk}
* and a Context is available at the call site, the caller should pass it in via
* {@link org.dspace.content.crosswalk.ContextAwareDisseminationCrosswalk#setContext(org.dspace.core.Context)}
* before invoking this method. The implementing class should then use
* {@link ContextAwareDisseminationCrosswalk#getContext()} to retrieve that context and
* {@link ContextAwareDisseminationCrosswalk#handleContextCleanup()} to commit or complete it.
* <p>
*
* @param dso the DSpace Object whose metadata to export.

View File

@@ -7,17 +7,6 @@
*/
package org.dspace.content.crosswalk;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Date;
import java.text.SimpleDateFormat;
import java.util.logging.Level;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
@@ -33,6 +22,12 @@ import org.dspace.eperson.Group;
import org.jdom.Element;
import org.jdom.Namespace;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* METSRights Ingestion & Dissemination Crosswalk
* <p>
@@ -51,7 +46,7 @@ import org.jdom.Namespace;
* @author Tim Donohue
* @version $Revision: 2108 $
*/
public class METSRightsCrosswalk
public class METSRightsCrosswalk extends ContextAwareDisseminationCrosswalk
implements IngestionCrosswalk, DisseminationCrosswalk
{
/** log4j category */
@@ -120,14 +115,14 @@ public class METSRightsCrosswalk
* METSRights PermissionTypes.
*
* @param dso DSpace Object
* @param context Context Object
* @return XML Element corresponding to the new <RightsDeclarationMD> translation
* @throws CrosswalkException
* @throws IOException
* @throws SQLException
* @throws AuthorizeException
*/
@Override
public Element disseminateElement(DSpaceObject dso)
public Element disseminateElement(Context context,DSpaceObject dso)
throws CrosswalkException,
IOException, SQLException, AuthorizeException
{
@@ -156,7 +151,6 @@ public class METSRightsCrosswalk
// what those rights are -- too many types of content can be stored in DSpace
//Get all policies on this DSpace Object
Context context = new Context();
List<ResourcePolicy> policies = AuthorizeManager.getPolicies(context, dso);
//For each DSpace policy
@@ -282,9 +276,31 @@ public class METSRightsCrosswalk
}//end for each policy
context.complete();
return rightsMD;
}
/**
* Actually Disseminate into METSRights schema. This method locates all DSpace
* policies (permissions) for the provided object, and translates them into
* METSRights PermissionTypes.
*
* @param dso DSpace Object
* @return XML Element corresponding to the new <RightsDeclarationMD> translation
* @throws CrosswalkException
* @throws IOException
* @throws SQLException
* @throws AuthorizeException
* @deprecated Use {@link #disseminateElement(Context, DSpaceObject)} instead; it does not need to create a new Context internally.
*/
@Override
@Deprecated
public Element disseminateElement(DSpaceObject dso)
throws CrosswalkException,
IOException, SQLException, AuthorizeException {
Context context = getContext();
Element element = disseminateElement(context, dso);
handleContextCleanup();
return element;
}
@Override
public List<Element> disseminateList(DSpaceObject dso)
@@ -435,134 +451,81 @@ public class METSRightsCrosswalk
public void ingest(Context context, DSpaceObject dso, List<Element> ml)
throws CrosswalkException, IOException, SQLException, AuthorizeException
{
// we cannot crosswalk METSRights to a SITE object
// SITE objects are not supported by the METSRightsCrosswalk
if (dso.getType() == Constants.SITE)
{
throw new CrosswalkObjectNotSupported("Wrong target object type, METSRightsCrosswalk cannot crosswalk a SITE object.");
}
//First, clear all existing Policies on this DSpace Object
// as we don't want them to conflict with policies we will be adding
if(!ml.isEmpty())
// If we're fed the top-level <RightsDeclarationMD> wrapper element, recurse into its guts.
// What we need to analyze are the <Context> elements underneath it.
if(!ml.isEmpty() && ml.get(0).getName().equals("RightsDeclarationMD"))
{
AuthorizeManager.removeAllPolicies(context, dso);
ingest(context, dso, ml.get(0).getChildren());
}
// Loop through each Element in the List
List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
for (Element element : ml)
else
{
// if we're fed a <RightsDeclarationMD> wrapper object, recurse on its guts:
if (element.getName().equals("RightsDeclarationMD"))
// Loop through each <Context> Element in the passed in List, creating a ResourcePolicy for each
List<ResourcePolicy> policies = new ArrayList<>();
for (Element element : ml)
{
ingest(context, dso, element.getChildren());
}
// "Context" section (where permissions are stored)
else if (element.getName().equals("Context"))
{
//get what class of context this is
String contextClass = element.getAttributeValue("CONTEXTCLASS");
if ((element.getAttributeValue("start-date") != null)
|| (element.getAttributeValue("end-date") != null)
|| (element.getAttributeValue("rpName") != null))
{
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
try {
ResourcePolicy rp = ResourcePolicy.create(context);
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("GENERAL PUBLIC")) {
Group anonGroup = Group.find(context, 0);
rp.setGroup(anonGroup);
}
else
{
if (element.getAttributeValue("CONTEXTCLASS").equalsIgnoreCase("REPOSITORY MGR")) {
Group adminGroup = Group.find(context, 1);
rp.setGroup(adminGroup);
}
}
if (element.getAttributeValue("rpName") != null)
{
rp.setRpName(element.getAttributeValue("rpName"));
}
try {
if (element.getAttributeValue("start-date") != null)
{
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
}
if (element.getAttributeValue("end-date") != null)
{
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
}
}catch (ParseException ex) {
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
}
List<Element> le = new ArrayList<Element>(element.getChildren());
for (Element el : le)
{
if ((el.getAttributeValue("DISCOVER").equalsIgnoreCase("true"))
&& (el.getAttributeValue("DISPLAY").equalsIgnoreCase("true")))
{
if (el.getAttributeValue("DELETE").equalsIgnoreCase("false"))
{
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("false"))
{
rp.setAction(Constants.READ);
}
else
{
rp.setAction(Constants.WRITE);
}
}
else
{
if (el.getAttributeValue("MODIFY").equalsIgnoreCase("true"))
{
rp.setAction(Constants.DELETE);
if ((el.getAttributeValue("COPY").equalsIgnoreCase("true"))
&&(el.getAttributeValue("DUPLICATE").equalsIgnoreCase("true"))
&&(el.getAttributeValue("PRINT").equalsIgnoreCase("true")))
{
rp.setAction(Constants.ADMIN);
}
}
}
}
}
policies.add(rp);
} catch (NullPointerException ex) {
java.util.logging.Logger.getLogger(METSRightsCrosswalk.class.getName()).log(Level.SEVERE, null, ex);
}
assignPermissions(context, dso, policies);
}
else
// Must be a "Context" section (where permissions are stored)
if (element.getName().equals("Context"))
{
//also get reference to the <Permissions> element
//get what class of context this is
String contextClass = element.getAttributeValue("CONTEXTCLASS");
ResourcePolicy rp = ResourcePolicy.create(context);
SimpleDateFormat sdf = new SimpleDateFormat( "yyyy-MM-dd" );
// get reference to the <Permissions> element
// Note: we are assuming here that there will only ever be ONE <Permissions>
// element. Currently there are no known use cases for multiple.
Element permsElement = element.getChild("Permissions", METSRights_NS);
if(permsElement == null) {
log.error("No <Permissions> element was found. Skipping this <Context> element.");
continue;
}
if (element.getAttributeValue("rpName") != null)
{
rp.setRpName(element.getAttributeValue("rpName"));
}
try {
if (element.getAttributeValue("start-date") != null)
{
rp.setStartDate(sdf.parse(element.getAttributeValue("start-date")));
}
if (element.getAttributeValue("end-date") != null)
{
rp.setEndDate(sdf.parse(element.getAttributeValue("end-date")));
}
}catch (ParseException ex) {
log.error("Failed to parse embargo date. The date needs to be in the format 'yyyy-MM-dd'.", ex);
}
//Check if this permission pertains to Anonymous users
if(ANONYMOUS_CONTEXTCLASS.equals(contextClass))
{
//get DSpace Anonymous group, ID=0
Group anonGroup = Group.find(context, 0);
Group anonGroup = Group.find(context, Group.ANONYMOUS_ID);
if(anonGroup==null)
{
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Anonymous Group is missing from the database.");
}
assignPermissions(context, dso, anonGroup, permsElement);
rp.setGroup(anonGroup);
} // else if this permission declaration pertains to Administrators
else if(ADMIN_CONTEXTCLASS.equals(contextClass))
{
//get DSpace Administrator group, ID=1
Group adminGroup = Group.find(context, 1);
Group adminGroup = Group.find(context, Group.ADMIN_ID);
if(adminGroup==null)
{
throw new CrosswalkInternalException("The DSpace database has not been properly initialized. The Administrator Group is missing from the database.");
}
assignPermissions(context, dso, adminGroup, permsElement);
rp.setGroup(adminGroup);
} // else if this permission pertains to another DSpace group
else if(GROUP_CONTEXTCLASS.equals(contextClass))
{
@@ -591,8 +554,8 @@ public class METSRightsCrosswalk
+ "Please restore this group using the SITE AIP, or recreate it.");
}
//assign permissions to group on this object
assignPermissions(context, dso, group, permsElement);
//assign group to policy
rp.setGroup(group);
}
catch(PackageException pe)
{
@@ -600,7 +563,7 @@ public class METSRightsCrosswalk
//We'll just wrap it as a CrosswalkException and throw it upwards
throw new CrosswalkException(pe);
}
}//end if Group
}// else if this permission pertains to a DSpace person
else if(PERSON_CONTEXTCLASS.equals(contextClass))
{
//we need to find the person it pertains to
@@ -629,89 +592,26 @@ public class METSRightsCrosswalk
+ "Please restore this Person object using the SITE AIP, or recreate it.");
}
//assign permissions to person on this object
assignPermissions(context, dso, person, permsElement);
//assign person to the policy
rp.setEPerson(person);
}//end if Person
else
else {
log.error("Unrecognized CONTEXTCLASS: " + contextClass);
}
} //end if "Context" element
}//end while loop
}
}
//set permissions on the policy and add it to the list of policies
rp.setAction(parsePermissions(permsElement));
policies.add(rp);
} //end if "Context" element
}//end for loop
/**
* Parses the 'permsElement' (corresponding to a <code>Permissions</code>
* element), and assigns those permissions to the specified Group
* on the specified DSpace Object.
*
* @param context DSpace context object
* @param dso The DSpace Object
* @param group The DSpace Group
* @param permsElement The METSRights <code>Permissions</code> element
*/
private void assignPermissions(Context context, DSpaceObject dso, List<ResourcePolicy> policies)
throws SQLException, AuthorizeException
{
AuthorizeManager.removeAllPolicies(context, dso);
if (policies == null){
throw new AuthorizeException("Policies are null");
}
else{
// Finally, we need to remove any existing policies from the current object,
// and replace them with the policies provided via METSRights. NOTE:
// if the list of policies provided by METSRights is an empty list, then
// the final object will have no policies attached.
AuthorizeManager.removeAllPolicies(context, dso);
AuthorizeManager.addPolicies(context, policies, dso);
}
}
private void assignPermissions(Context context, DSpaceObject dso, Group group, Element permsElement)
throws SQLException, AuthorizeException
{
//first, parse our permissions to determine which action we are allowing in DSpace
int actionID = parsePermissions(permsElement);
//If action ID is less than base READ permissions (value=0),
// then something must've gone wrong in the parsing
if(actionID < Constants.READ)
{
log.warn("Unable to properly restore all access permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ ") for group '" + group.getName() + "'.");
}
//Otherwise, add the appropriate group policy for this object
AuthorizeManager.addPolicy(context, dso, actionID, group);
}
/**
* Parses the 'permsElement' (corresponding to a <code>Permissions</code>
* element), and assigns those permissions to the specified EPerson
* on the specified DSpace Object.
*
* @param context DSpace context object
* @param dso The DSpace Object
* @param person The DSpace EPerson
* @param permsElement The METSRights <code>Permissions</code> element
*/
private void assignPermissions(Context context, DSpaceObject dso, EPerson person, Element permsElement)
throws SQLException, AuthorizeException
{
//first, parse our permissions to determine which action we are allowing in DSpace
int actionID = parsePermissions(permsElement);
//If action ID is less than base READ permissions (value=0),
// then something must've gone wrong in the parsing
if(actionID < Constants.READ)
{
log.warn("Unable to properly restore all access permissions on object ("
+ "type=" + Constants.typeText[dso.getType()] + ", "
+ "handle=" + dso.getHandle() + ", "
+ "ID=" + dso.getID()
+ ") for person '" + person.getEmail() + "'.");
}
//Otherwise, add the appropriate EPerson policy for this object
AuthorizeManager.addPolicy(context, dso, actionID, person);
} // end else
}
/**
@@ -784,4 +684,5 @@ private void assignPermissions(Context context, DSpaceObject dso, List<ResourceP
// return -1 to signify failure (as 0 = READ permissions)
return -1;
}
}

View File

@@ -21,15 +21,7 @@ import java.util.zip.ZipFile;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.*;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.ConfigurationManager;
@@ -37,6 +29,8 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.jdom.Element;
/**
@@ -324,18 +318,18 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
}
else
{
ZipFile zip = new ZipFile(pkgFile);
try(ZipFile zip = new ZipFile(pkgFile))
{
// Retrieve the manifest file entry (named mets.xml)
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
// Retrieve the manifest file entry (named mets.xml)
ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
// parse the manifest and sanity-check it.
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
validate, getConfigurationName());
// close the Zip file for now
// (we'll extract the other files from zip when we need them)
zip.close();
if(manifestEntry!=null)
{
// parse the manifest and sanity-check it.
manifest = METSManifest.create(zip.getInputStream(manifestEntry),
validate, getConfigurationName());
}
}
}
// return our parsed out METS manifest
@@ -660,8 +654,24 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
addBitstreams(context, item, manifest, pkgFile, params, callback);
// have subclass manage license since it may be extra package file.
addLicense(context, item, license, (Collection) dso
.getParentObject(), params);
Collection owningCollection = (Collection) dso.getParentObject();
if(owningCollection == null)
{
//We are probably dealing with an item that isn't archived yet
InProgressSubmission inProgressSubmission = WorkspaceItem.findByItem(context, item);
if(inProgressSubmission == null)
{
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow"))
{
inProgressSubmission = XmlWorkflowItem.findByItem(context, item);
}else{
inProgressSubmission = WorkflowItem.findByItem(context, item);
}
}
owningCollection = inProgressSubmission.getCollection();
}
addLicense(context, item, license, owningCollection, params);
// FIXME ?
// should set lastModifiedTime e.g. when ingesting AIP.

View File

@@ -127,6 +127,8 @@ public class Constants
*/
public static final int ADMIN = 11;
public static final int WITHDRAWN_READ = 12;
/** Position of front page news item -- top box */
public static final int NEWS_TOP = 0;
@@ -139,7 +141,7 @@ public class Constants
public static final String[] actionText = { "READ", "WRITE",
"OBSOLETE (DELETE)", "ADD", "REMOVE", "WORKFLOW_STEP_1",
"WORKFLOW_STEP_2", "WORKFLOW_STEP_3", "WORKFLOW_ABORT",
"DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN" };
"DEFAULT_BITSTREAM_READ", "DEFAULT_ITEM_READ", "ADMIN", "WITHDRAWN_READ" };
/**
* generating constants for the relevance array dynamically is simple: just
@@ -175,7 +177,9 @@ public class Constants
0, // 8 - WORKFLOW_ABORT
RCOLLECTION, // 9 - DEFAULT_BITSTREAM_READ
RCOLLECTION, // 10 - DEFAULT_ITEM_READ
RITEM | RCOLLECTION | RCOMMUNITY // 11 - ADMIN
RITEM | RCOLLECTION | RCOMMUNITY, // 11 - ADMIN
RBITSTREAM | RBUNDLE | RITEM // 12 - WITHDRAWN_READ
};
public static final String DEFAULT_ENCODING = "UTF-8";

View File

@@ -474,8 +474,18 @@ public class Email
System.out.println(" - To: " + to);
System.out.println(" - Subject: " + subject);
System.out.println(" - Server: " + server);
boolean disabled = ConfigurationManager.getBooleanProperty("mail.server.disabled", false);
try
{
if( disabled)
{
System.err.println("\nError sending email:");
System.err.println(" - Error: cannot test email because mail.server.disabled is set to true");
System.err.println("\nPlease see the DSpace documentation for assistance.\n");
System.err.println("\n");
System.exit(1);
return;
}
e.send();
}
catch (MessagingException me)

View File

@@ -16,6 +16,9 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import org.dspace.core.service.NewsService;
import org.dspace.utils.DSpace;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -39,6 +42,10 @@ public class NewsManager
*/
public static String readNewsFile(String newsFile)
{
NewsService newsService = new DSpace().getSingletonService(NewsService.class);
if (!newsService.validate(newsFile)) {
throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
}
String fileName = getNewsFilePath();
fileName += newsFile;
@@ -81,6 +88,10 @@ public class NewsManager
*/
public static String writeNewsFile(String newsFile, String news)
{
NewsService newsService = new DSpace().getSingletonService(NewsService.class);
if (!newsService.validate(newsFile)) {
throw new IllegalArgumentException("The file "+ newsFile + " is not a valid news file");
}
String fileName = getNewsFilePath();
fileName += newsFile;

View File

@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core;
import java.util.List;
import org.dspace.core.service.NewsService;
public class NewsServiceImpl implements NewsService {
private List<String> acceptableFilenames;
public void setAcceptableFilenames(List<String> acceptableFilenames) {
this.acceptableFilenames = acceptableFilenames;
}
@Override
public boolean validate(String newsName) {
if (acceptableFilenames != null) {
return acceptableFilenames.contains(newsName);
}
return false;
}
}

View File

@@ -0,0 +1,12 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core.service;
public interface NewsService {
boolean validate(String newsName);
}
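A sketch of how the whitelist added above is meant to behave; the file names are illustrative stand-ins for whatever is configured, not taken from this diff:

import java.util.Arrays;
import org.dspace.core.NewsServiceImpl;

public class NewsWhitelistSketch
{
    public static void main(String[] args)
    {
        NewsServiceImpl newsService = new NewsServiceImpl();
        // Only names on this list pass validate(); anything else (for example a
        // path-traversal attempt) is rejected before NewsManager touches the filesystem.
        newsService.setAcceptableFilenames(Arrays.asList("news-top.html", "news-side.html"));
        System.out.println(newsService.validate("news-top.html"));     // true
        System.out.println(newsService.validate("../../etc/passwd"));  // false
    }
}

In NewsManager the singleton is obtained through new DSpace().getSingletonService(NewsService.class), so the acceptable names would normally come from the service configuration rather than being set in code as in this sketch.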

View File

@@ -164,7 +164,7 @@ public class CurationCli
}
else
{
c.setIgnoreAuthorization(true);
c.turnOffAuthorisationSystem();
}
Curator curator = new Curator();

View File

@@ -29,36 +29,50 @@ import org.dspace.core.PluginManager;
/**
* TaskResolver takes a logical name of a curation task and attempts to deliver
* a suitable implementation object. Supported implementation types include:
* (1) Classpath-local Java classes configured and loaded via PluginManager.
* (2) Local script-based tasks, viz. coded in any scripting language whose
* <ol>
* <li>Classpath-local Java classes configured and loaded via PluginManager.</li>
* <li> Local script-based tasks, viz. coded in any scripting language whose
* runtimes are accessible via the JSR-223 scripting API. This really amounts
* to the family of dynamic JVM languages: JRuby, Jython, Groovy, Javascript, etc
* Note that the requisite jars and other resources for these languages must be
* installed in the DSpace instance for them to be used here.
* to the family of dynamic JVM languages: JRuby, Jython, Groovy, Javascript, etc.</li>
* </ol>
* <p>
* Note that the requisite jars and other resources for these languages must
* be installed in the DSpace instance for them to be used here.
* Further work may involve remote URL-loadable code, etc.
*
* <p>
* Scripted tasks are managed in a directory configured with the
* dspace/config/modules/curate.cfg property "script.dir". A catalog of
* scripted tasks named 'task.catalog" is kept in this directory.
* {@code dspace/config/modules/curate.cfg} property "script.dir".
* A catalog of
* scripted tasks named "task.catalog" is kept in this directory.
* Each task has a 'descriptor' property with value syntax:
* <engine>|<relFilePath>|<implClassCtor>
* <br/>
* {@code <engine>|<relFilePath>|<implClassCtor>}
*
* <p>
* An example property:
*
* linkchecker = ruby|rubytask.rb|LinkChecker.new
*
* <br/>
* {@code linkchecker = ruby|rubytask.rb|LinkChecker.new}
*
* <p>
* This descriptor means that a 'ruby' script engine will be created,
* a script file named 'rubytask.rb' in the directory <script.dir> will be
* loaded and the resolver will expect an evaluation of 'LinkChecker.new' will
* provide a correct implementation object.
*
* a script file named 'rubytask.rb' in the directory {@code <script.dir>}
* will be
loaded, and the resolver will expect that an evaluation of 'LinkChecker.new'
* will provide a correct implementation object.
*
* <p>
* Script files may embed their descriptors to facilitate deployment.
* To accomplish this, a script must include the descriptor string with syntax:
* $td=<descriptor> somewhere on a comment line. for example:
*
* # My descriptor $td=ruby|rubytask.rb|LinkChecker.new
*
* For portability, the <relFilePath> component may be omitted in this context.
* Thus, $td=ruby||LinkChecker.new will be expanded to a descriptor
* To accomplish this, a script must include the descriptor string with
* syntax {@code $td=<descriptor>} somewhere on a comment line. For example:
*
* <p>
* {@code # My descriptor $td=ruby|rubytask.rb|LinkChecker.new}
*
* <p>
* For portability, the {@code <relFilePath>} component may be omitted in
* this context.
* Thus, {@code $td=ruby||LinkChecker.new} will be expanded to a descriptor
* with the name of the embedding file.
*
* @author richardrodgers
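The descriptor documented above is a simple three-part, pipe-delimited string. This is not TaskResolver's actual parsing code, only a sketch of how such a descriptor splits into its parts:

public class DescriptorSketch
{
    public static void main(String[] args)
    {
        // <engine>|<relFilePath>|<implClassCtor>, per the Javadoc above
        String descriptor = "ruby|rubytask.rb|LinkChecker.new";
        String[] parts = descriptor.split("\\|", 3);
        System.out.println("engine        = " + parts[0]);
        System.out.println("relFilePath   = " + parts[1]); // may be empty in an embedded $td= descriptor
        System.out.println("implClassCtor = " + parts[2]);
    }
}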

View File

@@ -218,10 +218,20 @@ public class DiscoverQuery {
this.facetOffset = facetOffset;
}
/**
* Sets the fields which you want Discovery to return in the search results.
* It is HIGHLY recommended to limit the fields returned, as by default
* some backends (like Solr) will return everything.
* @param field field to add to the list of fields returned
*/
public void addSearchField(String field){
this.searchFields.add(field);
}
/**
* Get list of fields which Discovery will return in the search results
* @return List of field names
*/
public List<String> getSearchFields() {
return searchFields;
}
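A sketch of a caller using the new method; setQuery is assumed to exist alongside addSearchField in the DSpace 5.x DiscoverQuery API, and the field names are the identifier fields used elsewhere in this diff:

import org.dspace.discovery.DiscoverQuery;

public class SearchFieldSketch
{
    static DiscoverQuery buildQuery(String userQuery)
    {
        DiscoverQuery query = new DiscoverQuery();
        query.setQuery(userQuery);
        // Ask Solr for only the identifier fields instead of every stored field
        query.addSearchField("handle");
        query.addSearchField("search.resourcetype");
        query.addSearchField("search.resourceid");
        return query;
    }
}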

View File

@@ -39,7 +39,7 @@ public class IndexClient {
public static void main(String[] args) throws SQLException, IOException, SearchServiceException {
Context context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
String usage = "org.dspace.discovery.IndexClient [-cbhf[r <item handle>]] or nothing to update/clean an existing index.";
Options options = new Options();

View File

@@ -113,4 +113,11 @@ public interface SearchService {
* @return the indexed field
*/
String toSortFieldIndex(String metadataField, String type);
/**
* Utility method to escape any special characters in a user's query
* @param query
* @return query with any special characters escaped
*/
String escapeQueryChars(String query);
}

View File

@@ -119,6 +119,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
private static final Logger log = Logger.getLogger(SolrServiceImpl.class);
protected static final String LAST_INDEXED_FIELD = "SolrIndexer.lastIndexed";
protected static final String HANDLE_FIELD = "handle";
protected static final String RESOURCE_TYPE_FIELD = "search.resourcetype";
protected static final String RESOURCE_ID_FIELD = "search.resourceid";
public static final String FILTER_SEPARATOR = "\n|||\n";
@@ -149,9 +152,11 @@ public class SolrServiceImpl implements SearchService, IndexingService {
solr.setBaseURL(solrService);
solr.setUseMultiPartPost(true);
// Dummy/test query to search for Item (type=2) of ID=1
SolrQuery solrQuery = new SolrQuery()
.setQuery("search.resourcetype:2 AND search.resourceid:1");
.setQuery(RESOURCE_TYPE_FIELD + ":2 AND " + RESOURCE_ID_FIELD + ":1");
// Only return obj identifier fields in result doc
solrQuery.setFields(RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
solr.query(solrQuery);
// As long as Solr initialized, check with DatabaseUtils to see
@@ -323,7 +328,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
try {
if(getSolr() != null){
getSolr().deleteByQuery("handle:\"" + handle + "\"");
getSolr().deleteByQuery(HANDLE_FIELD + ":\"" + handle + "\"");
if(commit)
{
getSolr().commit();
@@ -462,37 +467,49 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
if (force)
{
getSolr().deleteByQuery("search.resourcetype:[2 TO 4]");
getSolr().deleteByQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
} else {
SolrQuery query = new SolrQuery();
query.setQuery("search.resourcetype:[2 TO 4]");
QueryResponse rsp = getSolr().query(query);
SolrDocumentList docs = rsp.getResults();
// Query for all indexed Items, Collections and Communities,
// returning just their handle
query.setFields(HANDLE_FIELD);
query.addSort(HANDLE_FIELD, SolrQuery.ORDER.asc);
query.setQuery(RESOURCE_TYPE_FIELD + ":[2 TO 4]");
Iterator iter = docs.iterator();
while (iter.hasNext())
{
// Get the total amount of results
QueryResponse totalResponse = getSolr().query(query);
long total = totalResponse.getResults().getNumFound();
SolrDocument doc = (SolrDocument) iter.next();
int start = 0;
int batch = 100;
String handle = (String) doc.getFieldValue("handle");
query.setRows(batch);
while (start < total) {
query.setStart(start);
QueryResponse rsp = getSolr().query(query);
SolrDocumentList docs = rsp.getResults();
DSpaceObject o = HandleManager.resolveToObject(context, handle);
for (SolrDocument doc : docs) {
String handle = (String) doc.getFieldValue(HANDLE_FIELD);
if (o == null)
{
log.info("Deleting: " + handle);
/*
* Use IndexWriter to delete, it's easier to manage
* write.lock
*/
unIndexContent(context, handle);
} else {
context.removeCached(o, o.getID());
log.debug("Keeping: " + handle);
DSpaceObject o = HandleManager.resolveToObject(context, handle);
if (o == null) {
log.info("Deleting: " + handle);
/*
* Use IndexWriter to delete, it's easier to manage
* write.lock
*/
unIndexContent(context, handle);
} else {
context.removeCached(o, o.getID());
log.debug("Keeping: " + handle);
}
}
start += batch;
}
}
}
} catch(Exception e)
{
@@ -616,7 +633,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
boolean inIndex = false;
SolrQuery query = new SolrQuery();
query.setQuery("handle:" + handle);
query.setQuery(HANDLE_FIELD + ":" + handle);
// Specify that we ONLY want the LAST_INDEXED_FIELD returned in the field list (fl)
query.setFields(LAST_INDEXED_FIELD);
QueryResponse rsp;
try {
@@ -1444,9 +1463,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// New fields to weaken the dependence on handles, and allow for faster
// list display
doc.addField("search.uniqueid", type+"-"+id);
doc.addField("search.resourcetype", Integer.toString(type));
doc.addField(RESOURCE_TYPE_FIELD, Integer.toString(type));
doc.addField("search.resourceid", Integer.toString(id));
doc.addField(RESOURCE_ID_FIELD, Integer.toString(id));
// want to be able to search for handle, so use keyword
// (not tokenized, but it is indexed)
@@ -1454,7 +1473,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
{
// want to be able to search for handle, so use keyword
// (not tokenized, but it is indexed)
doc.addField("handle", handle);
doc.addField(HANDLE_FIELD, handle);
}
if (locations != null)
@@ -1584,7 +1603,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
discoveryQuery.addFilterQueries("location:l" + dso.getID());
} else if (dso instanceof Item)
{
discoveryQuery.addFilterQueries("handle:" + dso.getHandle());
discoveryQuery.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
}
}
return search(context, discoveryQuery, includeUnDiscoverable);
@@ -1620,6 +1639,18 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
solrQuery.setQuery(query);
// Add any search fields to our query. This is the limited list
// of fields that will be returned in the solr result
for(String fieldName : discoveryQuery.getSearchFields())
{
solrQuery.addField(fieldName);
}
// Also ensure a few key obj identifier fields are returned with every query
solrQuery.addField(HANDLE_FIELD);
solrQuery.addField(RESOURCE_TYPE_FIELD);
solrQuery.addField(RESOURCE_ID_FIELD);
if(discoveryQuery.isSpellCheck())
{
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -1640,7 +1671,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
if(discoveryQuery.getDSpaceObjectFilter() != -1)
{
solrQuery.addFilterQuery("search.resourcetype:" + discoveryQuery.getDSpaceObjectFilter());
solrQuery.addFilterQuery(RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter());
}
for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++)
@@ -1753,7 +1784,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
query.addFilterQueries("location:l" + dso.getID());
} else if (dso instanceof Item)
{
query.addFilterQueries("handle:" + dso.getHandle());
query.addFilterQueries(HANDLE_FIELD + ":" + dso.getHandle());
}
}
return searchJSON(context, query, jsonIdentifier);
@@ -1807,7 +1838,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
{
result.addDSpaceObject(dso);
} else {
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue("handle")));
log.error(LogManager.getHeader(context, "Error while retrieving DSpace object from discovery index", "Handle: " + doc.getFirstValue(HANDLE_FIELD)));
continue;
}
@@ -1926,9 +1957,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
protected static DSpaceObject findDSpaceObject(Context context, SolrDocument doc) throws SQLException {
Integer type = (Integer) doc.getFirstValue("search.resourcetype");
Integer id = (Integer) doc.getFirstValue("search.resourceid");
String handle = (String) doc.getFirstValue("handle");
Integer type = (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD);
Integer id = (Integer) doc.getFirstValue(RESOURCE_ID_FIELD);
String handle = (String) doc.getFirstValue(HANDLE_FIELD);
if (type != null && id != null)
{
@@ -1981,7 +2012,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery(query);
solrQuery.setFields("search.resourceid", "search.resourcetype");
//Only return obj identifier fields in result doc
solrQuery.setFields(RESOURCE_ID_FIELD, RESOURCE_TYPE_FIELD);
solrQuery.setStart(offset);
solrQuery.setRows(max);
if (orderfield != null)
@@ -2001,7 +2033,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
{
SolrDocument doc = (SolrDocument) iter.next();
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue("search.resourcetype"), (Integer) doc.getFirstValue("search.resourceid"));
DSpaceObject o = DSpaceObject.find(context, (Integer) doc.getFirstValue(RESOURCE_TYPE_FIELD), (Integer) doc.getFirstValue(RESOURCE_ID_FIELD));
if (o != null)
{
@@ -2089,7 +2121,9 @@ public class SolrServiceImpl implements SearchService, IndexingService {
try{
SolrQuery solrQuery = new SolrQuery();
//Set the query to handle since this is unique
solrQuery.setQuery("handle: " + item.getHandle());
solrQuery.setQuery(HANDLE_FIELD + ": " + item.getHandle());
//Only return obj identifier fields in result doc
solrQuery.setFields(HANDLE_FIELD, RESOURCE_TYPE_FIELD, RESOURCE_ID_FIELD);
//Add the more like this parameters !
solrQuery.setParam(MoreLikeThisParams.MLT, true);
//Add a comma separated list of the similar fields
@@ -2320,4 +2354,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
throw new SearchServiceException(e.getMessage(), e);
}
}
@Override
public String escapeQueryChars(String query) {
// Use Solr's built in query escape tool
// WARNING: You should only escape characters from user entered queries,
// otherwise you may accidentally BREAK field-based queries (which often
// rely on special characters to separate the field from the query value)
return ClientUtils.escapeQueryChars(query);
}
}
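Following the warning in the comment above, a short sketch of escaping only the user-entered portion of a query before it is combined with field syntax (the field name is illustrative):

import org.apache.solr.client.solrj.util.ClientUtils;

public class EscapeSketch
{
    public static void main(String[] args)
    {
        String userInput = "C++ (draft)";                         // raw text typed by a user
        String escaped = ClientUtils.escapeQueryChars(userInput);
        // Escape only the user part; the field prefix keeps its ':' intact
        String solrQuery = "title:" + escaped;
        System.out.println(solrQuery);
    }
}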

View File

@@ -91,10 +91,9 @@ public class EmbargoManager
}
}
String slift = myLift.toString();
boolean ignoreAuth = context.ignoreAuthorization();
try
{
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
item.clearMetadata(lift_schema, lift_element, lift_qualifier, Item.ANY);
item.addMetadata(lift_schema, lift_element, lift_qualifier, null, slift);
log.info("Set embargo on Item "+item.getHandle()+", expires on: "+slift);
@@ -105,7 +104,7 @@ public class EmbargoManager
}
finally
{
context.setIgnoreAuthorization(ignoreAuth);
context.restoreAuthSystemState();
}
}
@@ -267,7 +266,7 @@ public class EmbargoManager
try
{
context = new Context();
context.setIgnoreAuthorization(true);
context.turnOffAuthorisationSystem();
Date now = new Date();
// scan items under embargo
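Several files in this diff replace context.setIgnoreAuthorization(true) with turnOffAuthorisationSystem() and, where the previous state must be preserved, restoreAuthSystemState(). A sketch of the try/finally idiom in isolation; the privileged work itself is a placeholder:

import org.dspace.core.Context;

public class AuthBypassSketch
{
    static void doPrivileged(Context context)
    {
        try
        {
            context.turnOffAuthorisationSystem();
            // ... perform updates that would otherwise need explicit authorization ...
        }
        finally
        {
            // Returns the context to whatever authorization state it had before
            // the call, rather than unconditionally switching checks back on.
            context.restoreAuthSystemState();
        }
    }
}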

View File

@@ -499,7 +499,7 @@ public class EPerson extends DSpaceObject
break;
case LANGUAGE:
s = "m_text_value";
s = "m.text_value";
t = "language";
break;
case NETID:
@@ -507,23 +507,26 @@ public class EPerson extends DSpaceObject
break;
default:
s = "m_text_value";
s = "m.text_value";
t = "lastname";
}
// NOTE: The use of 's' in the order by clause cannot cause an SQL
// injection because the string is derived from constant values above.
TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY ?",s);
TableRowIterator rows;
if(!t.equals("")) {
rows = DatabaseManager.query(context,
"SELECT * FROM eperson e " +
"LEFT JOIN metadatavalue m on (m.resource_id = e.eperson_id and m.resource_type_id = ? and m.metadata_field_id = ?) " +
"ORDER BY ?",
"LEFT JOIN metadatavalue m on (m.resource_id = e.eperson_id and m.resource_type_id = ? and m.metadata_field_id = ?) " +
"ORDER BY " + s,
Constants.EPERSON,
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID(),
s
MetadataField.findByElement(context, MetadataSchema.find(context, "eperson").getSchemaID(), t, null).getFieldID()
);
}
else {
rows = DatabaseManager.query(context, "SELECT * FROM eperson e ORDER BY " + s);
}

View File

@@ -358,9 +358,9 @@ public class Group extends DSpaceObject
}
/**
* fast check to see if an eperson is a member called with eperson id, does
* database lookup without instantiating all of the epeople objects and is
* thus a static method
* fast check to see if the current EPerson is a member of a Group. Does
* database lookup without instantiating all of the EPerson objects and is
* thus a static method.
*
* @param c
* context
@@ -380,6 +380,29 @@ public class Group extends DSpaceObject
return epersonInGroup(c, groupid, currentuser);
}
/**
* Fast check to see if a given EPerson is a member of a Group.
* Does database lookup without instantiating all of the EPerson objects and
* is thus a static method.
*
* @param c current DSpace context.
* @param eperson candidate to test for membership.
* @param groupid group whose membership is to be tested.
* @return true if {@code eperson} is a member of the Group {@code groupid}.
* @throws SQLException passed through
*/
public static boolean isMember(Context c, EPerson eperson, int groupid)
throws SQLException
{
// Every EPerson is a member of Anonymous
if (groupid == 0)
{
return true;
}
return epersonInGroup(c, groupid, eperson);
}
/**
* Get all of the groups that an eperson is a member of.
*
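A sketch of a caller using the new overload, passing an arbitrary EPerson rather than relying on context.getCurrentUser(). Group 1 is conventionally the DSpace Administrator group; it is used here only as an example:

import java.sql.SQLException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;

public class MembershipSketch
{
    static boolean isAdministrator(Context context, EPerson candidate) throws SQLException
    {
        // Uses the new isMember(Context, EPerson, int) overload added above
        return Group.isMember(context, candidate, 1);
    }
}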

View File

@@ -284,14 +284,23 @@ public class EventManager
{
Context ctx = new Context();
for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
.iterator(); ci.hasNext();)
{
ConsumerProfile cp = (ConsumerProfile) ci.next();
if (cp != null)
try {
for (Iterator ci = ((Dispatcher) dispatcher).getConsumers()
.iterator(); ci.hasNext();)
{
cp.getConsumer().finish(ctx);
ConsumerProfile cp = (ConsumerProfile) ci.next();
if (cp != null)
{
cp.getConsumer().finish(ctx);
}
}
ctx.complete();
} catch (Exception e) {
ctx.abort();
throw e;
}
return;

View File

@@ -9,89 +9,167 @@ package org.dspace.handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.search.DSIndexer;
import org.dspace.browse.IndexBrowse;
import org.dspace.discovery.IndexClient;
/**
* A script to update the handle values in the database. This is typically used
* when moving from a test machine (handle = 123456789) to a production service.
* when moving from a test machine (handle = 123456789) to a production service
* or when making a test clone of a production service.
*
* @author Stuart Lewis
* @author Ivo Prajer (Czech Technical University in Prague)
*/
public class UpdateHandlePrefix
{
private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);
/**
* When invoked as a command-line tool, updates handle prefix
*
* @param args the command-line arguments, none used
* @throws java.lang.Exception
*
*/
public static void main(String[] args) throws Exception
{
// There should be two paramters
// There should be two parameters
if (args.length < 2)
{
System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n");
System.exit(1);
}
else
{
// Confirm with the user that this is what they want to do
String oldH = args[0];
String newH = args[1];
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
// Get info about changes
System.out.println("\nGetting information about handles from database...");
Context context = new Context();
System.out.println("If you continue, all handles in your repository with prefix " +
oldH + " will be updated to have handle prefix " + newH + "\n");
String sql = "SELECT count(*) as count FROM handle " +
String sql = "SELECT count(*) as count " +
"FROM handle " +
"WHERE handle LIKE '" + oldH + "%'";
TableRow row = DatabaseManager.querySingle(context, sql, new Object[] {});
long count = row.getLongColumn("count");
System.out.println(count + " items will be updated.\n");
System.out.print("Have you taken a backup, and are you ready to continue? [y/n]: ");
String choiceString = input.readLine();
if (choiceString.equalsIgnoreCase("y"))
if (count > 0)
{
// Make the changes
System.out.print("Updating handle table... ");
sql = "update handle set handle = '" + newH + "' || '/' || handle_id " +
"where handle like '" + oldH + "/%'";
int updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(updated + " items updated");
// Print info text about changes
System.out.println(
"In your repository will be updated " + count + " handle" +
((count > 1) ? "s" : "") + " to new prefix " + newH +
" from original " + oldH + "!\n"
);
System.out.print("Updating metadatavalues table... ");
sql = "UPDATE metadatavalue SET text_value= (SELECT 'http://hdl.handle.net/' || " +
"handle FROM handle WHERE handle.resource_id=item_id AND " +
"handle.resource_type_id=2) WHERE text_value LIKE 'http://hdl.handle.net/%';";
updated = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(updated + " metadata values updated");
// Confirm with the user that this is what they want to do
System.out.print(
"Servlet container (e.g. Apache Tomcat, Jetty, Caucho Resin) must be running.\n" +
"If it is necessary, please make a backup of the database.\n" +
"Are you ready to continue? [y/n]: "
);
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
String choiceString = input.readLine();
// Commit the changes
context.complete();
System.out.print("Re-creating browse and search indexes... ");
// Reinitialise the browse system
IndexBrowse.main(new String[] {"-i"});
// Reinitialise the browse system
try
if (choiceString.equalsIgnoreCase("y"))
{
DSIndexer.main(new String[0]);
}
catch (Exception e)
{
// Not a lot we can do
System.out.println("Error re-indexing:");
e.printStackTrace();
System.out.println("\nPlease manually run [dspace]/bin/index-all");
}
try {
log.info("Updating handle prefix from " + oldH + " to " + newH);
// All done
System.out.println("\nHandles successfully updated.");
// Make the changes
System.out.print("\nUpdating handle table... ");
sql = "UPDATE handle " +
"SET handle = '" + newH + "' || '/' || handle_id " +
"WHERE handle like '" + oldH + "/%'";
int updHdl = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(
updHdl + " item" + ((updHdl > 1) ? "s" : "") + " updated"
);
System.out.print("Updating metadatavalues table... ");
sql = "UPDATE metadatavalue " +
"SET text_value = " +
"(" +
"SELECT 'http://hdl.handle.net/' || handle " +
"FROM handle " +
"WHERE handle.resource_id = metadatavalue.resource_id " +
"AND handle.resource_type_id = 2" +
") " +
"WHERE text_value LIKE 'http://hdl.handle.net/" + oldH + "/%'" +
"AND EXISTS " +
"(" +
"SELECT 1 " +
"FROM handle " +
"WHERE handle.resource_id = metadatavalue.resource_id " +
"AND handle.resource_type_id = 2" +
")";
int updMeta = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(
updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
);
// Commit the changes
context.complete();
log.info(
"Done with updating handle prefix. " +
"It was changed " + updHdl + " handle" + ((updHdl > 1) ? "s" : "") +
" and " + updMeta + " metadata record" + ((updMeta > 1) ? "s" : "")
);
}
catch (SQLException sqle)
{
if ((context != null) && (context.isValid()))
{
context.abort();
context = null;
}
System.out.println("\nError during SQL operations.");
throw sqle;
}
System.out.println("Handles successfully updated in database.\n");
System.out.println("Re-creating browse and search indexes...");
try
{
// Reinitialise the search and browse system
IndexClient.main(new String[] {"-b"});
System.out.println("Browse and search indexes are ready now.");
// All done
System.out.println("\nAll done successfully. Please check the DSpace logs!\n");
}
catch (Exception e)
{
// Not a lot we can do
System.out.println("Error during re-indexing.");
System.out.println(
"\n\nAutomatic re-indexing failed. Please perform it manually.\n" +
"You should run one of the following commands:\n\n" +
" [dspace]/bin/dspace index-discovery -b\n\n" +
"If you are using Solr for browse (this is the default setting).\n" +
"When launching this command, your servlet container must be running.\n\n" +
" [dspace]/bin/dspace index-lucene-init\n\n" +
"If you enabled Lucene for search.\n" +
"When launching this command, your servlet container must be shutdown.\n"
);
throw e;
}
}
else
{
System.out.println("No changes have been made to your data.\n");
}
}
else
{
System.out.println("No changes have been made to your data.");
System.out.println("Nothing to do! All handles are up-to-date.\n");
}
}
}
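As a worked example of what the handle UPDATE above produces (values are made up): for a handle row with handle_id 42, the new handle value is simply the new prefix concatenated with '/' and that handle_id.

public class HandleRewriteSketch
{
    public static void main(String[] args)
    {
        // Mirrors SET handle = '<newH>' || '/' || handle_id from the SQL above
        String newPrefix = "1234.5678";   // hypothetical new prefix
        int handleId = 42;                // hypothetical handle_id column value
        System.out.println(newPrefix + "/" + handleId);   // prints 1234.5678/42
    }
}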

View File

@@ -10,7 +10,6 @@ package org.dspace.identifier;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
@@ -68,12 +67,12 @@ public class DOIIdentifierProvider
public static final String DOI_QUALIFIER = "uri";
public static final Integer TO_BE_REGISTERED = 1;
public static final Integer TO_BE_RESERVERED = 2;
public static final Integer TO_BE_RESERVED = 2;
public static final Integer IS_REGISTERED = 3;
public static final Integer IS_RESERVED = 4;
public static final Integer UPDATE_RESERVERED = 5;
public static final Integer UPDATE_RESERVED = 5;
public static final Integer UPDATE_REGISTERED = 6;
public static final Integer UPDATE_BEFORE_REGISTERATION = 7;
public static final Integer UPDATE_BEFORE_REGISTRATION = 7;
public static final Integer TO_BE_DELETED = 8;
public static final Integer DELETED = 9;
@@ -251,7 +250,7 @@ public class DOIIdentifierProvider
return;
}
doiRow.setColumn("status", TO_BE_RESERVERED);
doiRow.setColumn("status", TO_BE_RESERVED);
try
{
DatabaseManager.update(context, doiRow);
@@ -353,11 +352,11 @@ public class DOIIdentifierProvider
}
else if (TO_BE_REGISTERED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", UPDATE_BEFORE_REGISTERATION);
doiRow.setColumn("status", UPDATE_BEFORE_REGISTRATION);
}
else if (IS_RESERVED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", UPDATE_RESERVERED);
doiRow.setColumn("status", UPDATE_RESERVED);
}
else
{
@@ -416,11 +415,11 @@ public class DOIIdentifierProvider
{
doiRow.setColumn("status", IS_REGISTERED);
}
else if (UPDATE_BEFORE_REGISTERATION == doiRow.getIntColumn("status"))
else if (UPDATE_BEFORE_REGISTRATION == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", TO_BE_REGISTERED);
}
else if (UPDATE_RESERVERED == doiRow.getIntColumn("status"))
else if (UPDATE_RESERVED == doiRow.getIntColumn("status"))
{
doiRow.setColumn("status", IS_RESERVED);
}

Some files were not shown because too many files have changed in this diff.