Compare commits

...

132 Commits

Author SHA1 Message Date
Kim Shepherd
a2f5fe34eb [maven-release-plugin] prepare release dspace-5.3 2015-07-29 09:30:55 +00:00
Kim Shepherd
ace19199e5 Updates to license files for 5.3 2015-07-29 08:18:54 +00:00
Tim Donohue
6d9fa26535 Minor fix. Since addBitstream() inherits polices, we need to first remove inherited custom policies 2015-07-28 11:42:41 -05:00
Pascal-Nicolas Becker
3efe549774 DS-2358: Preserves custom policy rules during versioning 2015-07-28 11:42:31 -05:00
Andrea Schweer
734744ec4f DS-2571 Fix jumping to value in descending browse 2015-07-23 14:37:44 -05:00
rradillen
829c30bab4 change default of findAuthorizedPerformanceOptimize
it is now false
2015-07-22 19:36:04 +00:00
rradillen
83cb04ed53 move Collection policy optimization property 2015-07-22 19:35:54 +00:00
rradillen
0911d60290 [DS-2527] Disable collection authorisation enumeration optimisation
Disable collection authorisation enumeration optimisation so LDAP and Shibboleth may work out of the box.
2015-07-22 19:35:43 +00:00
Pablo Buenaposada
9bb7036857 closing more span tags
Missing closing more span tags
2015-07-22 19:24:13 +00:00
pablobuenaposada
e0368f3ade fixed dots in upper pagination 2015-07-22 19:22:59 +00:00
Tim Donohue
660217c3f9 Add a new DSpaceWithdrawnFilter in place of hardcoding in DSpaceAuthorizationFilter. Update configs, and fix bugs. 2015-07-22 19:13:28 +00:00
Tim Donohue
5f13b8cc64 Update XSL stylesheet to display "deleted" status, and correct misspellings 2015-07-22 19:13:15 +00:00
Tim Donohue
a2caabc79a Fix DS-2593 : Withdrawn items are now given a "tombstone" in OAI-PMH. Also fix Context mgmt issues & authorization code. 2015-07-22 19:13:05 +00:00
Hardy Pottinger
cb9710cda4 Merge pull request #993 from ufal/DS-2658
DS-2658: Fix wrong mapping for dc metadata in html head
2015-07-22 14:07:55 -05:00
Ondrej Kosarko
56abebaece fixes DS-2658 2015-07-17 10:13:37 +02:00
Pascal-Nicolas Becker
0310db74aa DS-2614: Ignore custom resource policies for unfinished items 2015-07-15 14:48:10 +00:00
Tim Donohue
3e1bac69df Escape special characters in JSPUI queries as well 2015-07-15 14:19:21 +00:00
Tim Donohue
ec86af5a82 Also ensure query escaped in AbstractSearch 2015-07-15 14:19:11 +00:00
Tim Donohue
79e111996b Fix DS-2602 and DS-2461 by properly escaping user entered queries 2015-07-15 14:19:00 +00:00
Tim Donohue
f4c6f2680c Revert "DS-2461 Escape some colons in queries"
This reverts commit 2575d73b2d.
2015-07-15 14:18:49 +00:00
nicolasschwab
f3487be040 DS-2603: now if the item doesn't have a primary bitstream the value of citation_pdf_url metadata will be a link to the first public bitstream according to the order established by the user. 2015-07-07 16:35:41 +00:00
Tim Donohue
87d0770974 Merge pull request #979 from bram-atmire/dspace-5_x
DS-2560 - Did you mean option missing in Mirage2 (backport to 5.x branch)
2015-07-06 17:24:13 -04:00
junwei1229
1c9fa656aa fix the oai index issue when submitter is null
when submitter is null, it will cause NPE exception, so it will stop the OAI index process. eg. if using harvest, the submitter probably will be null in db.
2015-07-06 21:04:22 +00:00
cjuergen
59ff964f4f Fix for DS-2543 cleans the cached OAI responses after doing a full item import with the -c option 2015-07-06 15:42:00 +00:00
Hardy Pottinger
10c4661885 Merge pull request #955 from Mini-Pillai/DS-2560
DS 2560 - Did you mean option missing in Mirage2
2015-06-30 08:46:48 +02:00
Ivan Masár
afe9c1294f test DSpace build on Travis container infrastructure 2015-06-23 13:45:00 +00:00
Bram Luyten
7a54972ed1 Merge pull request #965 from ufal/DS-2620
DS-2620 typo in the word cocoon
2015-06-20 14:33:00 +02:00
Ondrej Kosarko
b2cb0ef4dd typo in the word cocoon 2015-06-18 10:02:07 +02:00
Àlex Magaz Graça
5edf641d6c DS-2594 Long file names overlap the second column of item metadata in Mirage 2 2015-06-16 14:48:14 +02:00
Roeland Dillen
d9b14a86f0 DS-2618: Process mail.server.disabled in test-email 2015-06-16 14:43:21 +02:00
Andrea Schweer
7b8fa49632 DS-2598 Correct XPATH for available date in mets format xoai
This ensures that dc.date.available is shown when using the mets metadata
format in OAI-PMH. Previously, the dateAvailable element was present but empty.
2015-06-02 15:33:36 +02:00
Mark H. Wood
b5540d5999 Merge pull request #951 from mwoodiupui/DS-2590
[DS-2590] Fix multiple issues with distributed archives.
2015-05-26 12:29:19 -04:00
Mark H. Wood
494ff0c4c1 [DS-2590] Improved commentary about these obscure problems. 2015-05-26 12:14:58 -04:00
Mark H. Wood
1c4c8943a9 [DS-2590] Roll back problematic upgrade of maven-assembly-plugin 2015-05-26 11:53:57 -04:00
Mark H. Wood
5cd56fb834 [DS-2590] Fix multiple issues with distributed archives.
Include dspace/modules/*/src/main/webapps so build succeeds.
Avoid damaging a sample ZIP archive by munging "line endings".
Upgrade to maven-assembly-plugin 2.5.4 (which uncovered the line
ending problem).
2015-05-22 14:45:34 -04:00
Ondřej Košarko
ed89d6b00e DS-2587 Resource policies rptype is null after upgrading 2015-05-21 17:18:46 +02:00
Hardy Pottinger
19b28f4734 [maven-release-plugin] prepare for next development iteration 2015-05-20 11:42:40 -05:00
Hardy Pottinger
4a8fdf6843 [maven-release-plugin] prepare release dspace-5.2 2015-05-20 11:42:37 -05:00
Tim Donohue
d040b9dd4e Fix DS-2582: Remove all milliseconds from dates. Refactor code slightly. 2015-05-19 14:01:32 -05:00
Ondřej Košarko
4036bf781a DS-2020 null check & turning _ to / in handles 2015-05-18 11:34:55 -05:00
Antoine Snyers
d011e24f74 DS-2529 CSV import bugfix for fields under authority control with a language 2015-05-18 11:07:57 -05:00
Tim Donohue
0e9f78e9df Fix for DS-2577. Query for PostgreSQL constraint name. Surround with double quotes if value is $1 or similar. 2015-05-15 18:18:27 +00:00
Christian Scheible
254097b2e2 [DS-2546] added missing curly brackets 2015-05-15 18:01:52 +00:00
Christian Scheible
8049cef23b [DS-2546] Added missing ZULU time zones where applicable 2015-05-15 18:01:43 +00:00
Christian Scheible
de842dbf30 [DS-2546] fixes problem in DateUtils parsing 2015-05-15 18:01:33 +00:00
Ivan Masár
8bcac58154 minor fix: remove extra colon in string 2015-05-15 10:07:21 +02:00
Àlex Magaz Graça
511b78277f [DS-2186] Request item copy doesn't always use RequestItemAuthorExtractor 2015-05-15 16:37:18 +12:00
KevinVdV
dbd019943a [DS-2131] SWORDv2 ingestion fails with NullPointerException when replacing a non archived item 2015-05-15 15:07:57 +12:00
Pascal-Nicolas Becker
7d8a9d5636 DS-1965: Adds date fields to the form used to edit policies (JSPUI). 2015-05-15 10:30:36 +12:00
ctu-developers
2ab6b10a03 Removed unnecessary changes from previous commit. 2015-05-14 16:42:45 -05:00
ctu-developers
cd7789e8df Fix of getLink() by returning servlet context.
Added static method to Resource.java and using it in DSpaceObject.java
2015-05-14 16:42:35 -05:00
Ivo Prajer
9287aa891f Quick fix of getLink() 2015-05-14 16:42:22 -05:00
Pascal-Nicolas Becker
a99203382c DS-2551: JSPUI show thumbnails only if user has read permission 2015-05-14 16:32:21 -05:00
tmtvl
6ec649df78 DS-2562, fix incorrect if statement. 2015-05-14 21:22:29 +00:00
Mark H. Wood
e9f4e4c2cc [DS-2379] Lists returned by JDOM can't be sort()ed, so use a more cooperative class. 2015-05-14 13:40:50 -04:00
Mark H. Wood
18cc6bb3ff [DS-2379] Sort the list of commands by their names 2015-05-14 13:40:50 -04:00
ctu-developers
8094d8fe18 DS-2511: Repaired resource policy endpoints
Repaired all endpoints in REST api.
Added XML annotation in ResourcePolicy.
Repaired bug in Bitstream with expand field.
Repaired creating ResourcePolicy with bitstream.
2015-05-14 16:21:28 +02:00
Andrea Schweer
b7a469d53c DS-2575 Ensure pooled/owned workflow task are listed in fixed order 2015-05-14 15:40:52 +12:00
Andrea Schweer
f168c6c33d DS-2461 Escape some colons in queries
This allows searching for titles with colons while still allowing fielded searches
2015-05-14 11:13:37 +12:00
Pascal-Nicolas Becker
981b62d9e9 DS-2545: Show only the collections the user can submit items to.
The JSPSelectColletionStep and JSPStartSubmissionLookupStep already set
the collections -the user can submit to- as request attribute. The JSPs
use this attribute already. This commit lets the SelectCollectionTag use
this attribute too instead of looking for the collections on itself.
2015-05-13 17:10:09 -05:00
Andrea Schweer
2c42d71a6a DS-2544 Delete temp files when exception is encountered
As suggested by William Tantzen on Jira.
2015-05-14 08:48:52 +12:00
Andrea Schweer
ca6bc57c6d [DS-2544] Improve temp file handling for IM thumbnailer 2015-05-14 08:48:52 +12:00
Andrea Schweer
0f0be17d0a [DS-2549] Render Repo identifier / Sample identifier on Identify page 2015-05-14 08:44:06 +12:00
Panagiotis Koutsourakis
5e5a7922d0 Fix a bug in the "Jump to" browse feature
When computing the offset for the "jump to" feature at
SolrBrowseDAO.doOffsetQuery we should take into account if we are
browsing a subset of the items (e.g. we are viewing the items that have
a specific subject) and not all of them (e.g. by title).
2015-05-13 15:42:07 -05:00
Andrea Schweer
bb4cb39373 Remove box styling for file upload in Mirage 2 2015-05-14 08:39:24 +12:00
Andrea Schweer
a257f516fa DS-2449 Restore template item label for Mirage 2 2015-05-14 08:39:24 +12:00
Andrea Schweer
9d8284d85f [DS-2212] Ignore _version_ field when sharding solr stats 2015-05-13 15:13:35 -05:00
Christian Scheible
57efa4f628 [DS-2423] Added oai overlays to classpath to ensure that oai command line tools work (like clean-cache) 2015-05-06 18:49:56 +02:00
Christian Scheible
5b5f44085a [DS-2423] changed architecture of DSpace filters. Removed need for @Autowire because filters are not handled by spring. 2015-05-06 18:49:56 +02:00
Christian Scheible
46ce2741bc [DS-2423] changed tests for OAI-interface to autowire the Filters. 2015-05-06 18:49:56 +02:00
Christian Scheible
0b799fc882 [DS-2423] Added possibility to create additional Filter for DSpace OAI-PMH interface 2015-05-06 18:49:56 +02:00
Andrea Schweer
04b57a60b3 [DS-2486] Increase robustness, improve directory delete behaviour
As suggested by Mark Wood, delete directories only when they were actually
created (and when --keep is not given).

Also refuse to go ahead with reindexing when it's obvious that there won't be
enough space, plus deal a little more gracefully with common errors (initial
export failing; -temp core still left from previous attempt).
2015-05-01 12:02:04 -05:00
Andrea Schweer
02b4314046 Disable version check when importing temporary stats data 2015-05-01 12:01:48 -05:00
Andrea Schweer
3d79fa76ab Make import/export of temporary core more robust 2015-05-01 12:01:33 -05:00
Andrea Schweer
ca1803ae93 Use stats component to get minimum date 2015-05-01 12:01:21 -05:00
Andrea Schweer
9046ec21d4 Export monthly batches to limit the number of docs to sort 2015-05-01 12:01:06 -05:00
Andrea Schweer
b30654e3d5 DS-2486 Add Solr import/export to launcher.xml 2015-05-01 12:00:53 -05:00
Andrea Schweer
ee19e11e6d DS-2486 New scripts to export/clear/import solr indexes 2015-05-01 12:00:34 -05:00
Andrea Schweer
a990c97959 DS-2486 Add UUID processing to CSV, JSON update handlers too 2015-05-01 12:00:18 -05:00
Pascal-Nicolas Becker
56816b13ba Merge pull request #926 from tuub/DS-2550-dspace-5_x
DS-2550: fix ImageMagick/Ghostscript problems with transparent pdfs
2015-04-27 14:30:25 +02:00
Pascal-Nicolas Becker
b414aaa195 DS-2550: fix ImageMagick/Ghostscript problems with transparent pdfs 2015-04-27 14:24:00 +02:00
Ivo Prajer
1a1ae35ec9 DS-2218: Unable to use command "update-handle-prefix"
* Removed extra semicolon in the sql command
* added check for "up-to-date"
* fix updating metadata values
* basic logging to DSpace log and SQL exception handling
* Changed, customized, added user message and repaired their order.
* Fixed return codes and some typos.
* Changed re-index engine from DSIndexer to Discovery and info text about manual re-indexing.
* Changes in SQL formatting and formatting of code.
2015-04-22 11:28:46 +02:00
Ivo Prajer
1029f393e4 Fix passing parameters LIMIT and OFFSET to sql query in the method getItems() for oracle 2015-04-20 10:47:22 +02:00
Ivo Prajer
c1039dfe26 Fix passing parameters LIMIT/OFFSET to sql query in the findAll() 2015-04-20 10:47:11 +02:00
Ivan Masár
cc96646e37 DS-2474 METS format in OAI includes only the first author 2015-04-20 09:44:17 +02:00
Ivan Masár
d2ad7c81de DS-2491 set deletedRecord=transient in OAI Identify 2015-04-20 09:12:45 +02:00
Bram Luyten
00e9c1131f DS-2531 New entries for the robots hostname list 2015-04-20 09:09:37 +02:00
Chris Wilper
77cc9abe49 fix year and capitalization in displayed copyright 2015-04-16 20:28:45 +02:00
Mark H. Wood
91018bfe0f Merge pull request #909 from mwoodiupui/DS-2518-5x
[DS-2518] EZID DOI IdentityProvider doesn't set the location in metadata
2015-04-08 12:53:55 -04:00
Mark H. Wood
7f9bcb283f Repair testing environment, enable real unit tests, add test of metadata crosswalking. 2015-04-08 11:49:35 -04:00
Mark H. Wood
ae11c1c795 Add location metadata so that the DOI actually resolves properly. 2015-04-08 11:49:35 -04:00
cjuergen
9cd5fa596b Fix for DS-2482 adds the attribute dspace.community or dspace.collection to the search and browse request if we browse or search a community or collection 2015-04-08 17:24:02 +02:00
rradillen
e10b10224a [DS-2532] botness of a visit is not properly logged when a location cannot be determined
Extract isBot from the location==null if in two places.
2015-04-02 08:52:03 +02:00
Pascal-Nicolas Becker
e08886ae09 Merge pull request #898 from tuub/DS-2403-dspace-5_x
DS-2403: Resolves DS-2403 and reduce logging of RDFConsumer.
2015-03-25 19:42:30 +01:00
Pascal-Nicolas Becker
df3ffcf7f9 DS-2403: Resolves DS-2403 and reduce logging of RDFConsumer. 2015-03-25 19:14:20 +01:00
Andrea Schweer
0c77f7be91 [DS-2513] Improve multipart header parsing
The original code broke when files were uploaded whose name contains
the semicolon-space sequence.
2015-03-18 10:53:07 +01:00
David Cook
cdc8e3144e DS-2514 Packaged version of html5shiv.js is missing "main" element
Added "main" element to html5shiv.js, which should bring it inline
with the 3.6.2pre distribution from aFarkas's Github repo:
https://raw.githubusercontent.com/aFarkas/html5shiv/
3.6.2pre/dist/html5shiv.js

This can be verified by unminifying html5shiv.js in the above link
and html5shiv.js in DSpace master, and comparing them in any merge
program like vimdiff or WinMerge.

Without this patch, IE 8 self-closes the "main" element, and pushes
its child DIVs after it instead of nesting them within itself, which
has repercussions when styling the JSPUI with CSS.

With this patch, IE 8 comprehends the "main" element, and nests the
DIVs correctly.
2015-03-18 10:19:16 +01:00
Ivan Masár
92847079d7 Updated README.md 2015-03-16 18:59:08 +01:00
Ivan Masár
b023c36941 Updated README.md 2015-03-16 15:19:01 +01:00
ctu-developers
aee3b0b710 Updated README.md 2015-03-16 15:18:55 +01:00
Christian Scheible
d0c8afb601 DS-2424 workaround for bug in xoai library. changed ref to red for Filter in Contexts 2015-03-11 10:50:22 -05:00
Ivan Masár
e9c14bbcea DS-2501 fix SQL in REST /items/find-by-metadata-field 2015-03-09 22:17:14 +01:00
Àlex Magaz Graça
2eca19daa3 [DS-2493] "View more" link is shown even when there aren't more items. 2015-03-06 13:54:03 +01:00
Tim Donohue
bcc7a75baa DS-2483 : Fix misspelling of "sword.compatibility"
https://jira.duraspace.org/browse/DS-2483
2015-03-05 21:32:42 +00:00
Tim Donohue
19222e9341 DS-2477 : Ensure distribution packages always get created with Unix (LF) line endings 2015-02-27 17:48:12 +01:00
Tim Donohue
8124a61738 [maven-release-plugin] prepare for next development iteration 2015-02-25 12:27:33 -06:00
Tim Donohue
09007146d0 [maven-release-plugin] prepare release dspace-5.1 2015-02-25 12:27:24 -06:00
Tim Donohue
e715c64404 Updates to LICENSES_THIRD_PARTY for 5.1 2015-02-25 11:54:42 -06:00
Luigi Andrea Pascarelli
53ff4510ac [DS-2044] fix cross-site scripting vulnerability and minor related issue
(verbose error output, avoid NPE on JSP during an attack)
2015-02-23 20:27:43 +00:00
Tim Donohue
495031001d DS-2278 : Fix two issues in XMLUI which block proper 404 Page Not Found pages from displaying for some URL patterns 2015-02-23 12:48:21 -06:00
Mark H. Wood
97e89384f1 Don't close the current sitemap if we never opened one 2015-02-22 15:43:01 -06:00
cjuergen
72913cda76 Fix for DS-2419 JSP UI ignores authorization.admin.usage. Just an incomplete name for the configuration parameter which determines the accessibility of usage statistics. 2015-02-22 15:42:31 -06:00
Tim Donohue
03097aaa35 DS-2448 - Fix for JSPUI path traversal issue from Pascal-Nicolas Becker 2015-02-20 22:38:42 +00:00
Tim Donohue
f6d3f67b52 Add in missing imports from previous commit 2015-02-20 20:20:26 +00:00
Luigi Andrea Pascarelli
62e0ac462e DS-1702 add code to prevent XSS attach on recent submission 2015-02-20 19:07:17 +00:00
Bill Tantzen
54310b014b fixed formatting 2015-02-20 12:30:14 -06:00
Bill Tantzen
beaf54f624 added synchronization for ArrayLists: agents and domains 2015-02-20 12:30:03 -06:00
Tim Donohue
114f1e0985 XMLUI path bug fixes and security fixes for DS-2445 DS-2130 DS-1896 2015-02-20 17:31:30 +00:00
Hardy Pottinger
1fdfe05c4c clarified the test expression, as a kindness to future generations 2015-02-17 14:45:42 -06:00
Hardy Pottinger
9c1f91d40b added back null handling, which I inadvertently dropped in favor of empty-string handling 2015-02-17 14:45:31 -06:00
Hardy Pottinger
39711b332f [DS-2034] refactored notnull method to notempty, at the suggestion of mhwood 2015-02-17 14:45:19 -06:00
Hardy Pottinger
6cfda147b4 [DS-2034] added emptystring handling to the notnull method in ControlPanel.java 2015-02-17 14:45:09 -06:00
Tim Donohue
eabdc610a0 Merge pull request #857 from tdonohue/DS-2427
Fix DS-2427 for 5.1 by consolidating problematic schema code into DatabaseUtils...
2015-02-11 15:51:25 -06:00
Tim Donohue
da74f5aa7e Add back in missing "canonicalize()" for Oracle 2015-02-04 10:58:11 -06:00
Tim Donohue
14c575a7c4 Fix DS-2427 for 5.1 by consolidating problematic code into DatabaseUtils.getSchemaName() so that it can be replaced easily in future. Also cleaned up config comments 2015-02-04 10:44:26 -06:00
Christian Scheible
d8c8d28c13 [DS-2438] fixed problem with immense metadata values for oai solr core 2015-02-04 10:05:50 +01:00
Pascal-Nicolas Becker
bf56f1f7e3 DS-640: Fixes Internal System Error if browse index is missing in JSPUI. 2015-02-03 15:29:21 +01:00
Pascal-Nicolas Becker
8046d154ee DS-2435: JSPUI send 400 Bad Request for unexisting browse index. 2015-02-03 15:29:13 +01:00
ctu-developers
589117e204 Add canonicalize for "db.schema" property
DS-2201: Unable to complete installation of DSpace with non-empty variable "db.schema" configuration file "build.properties"
2015-01-28 11:20:19 -06:00
Christian Scheible
e9e5423f97 [DS-2425] fixed typos in xoai.xml 2015-01-27 11:45:44 +01:00
Ondřej Košarko
c08f447cec ResumptionToken link for other verbs 2015-01-20 16:37:54 +01:00
Ivan Masár
cf25175155 [maven-release-plugin] prepare for next development iteration 2015-01-20 15:27:37 +01:00
141 changed files with 2811 additions and 986 deletions

View File

@@ -1,4 +1,5 @@
language: java language: java
sudo: false
env: env:
# Give Maven 1GB of memory to work with # Give Maven 1GB of memory to work with

View File

@@ -1,7 +1,7 @@
DSpace source code license: DSpace source code license:
Copyright (c) 2002-2013, DuraSpace. All rights reserved. Copyright (c) 2002-2015, DuraSpace. All rights reserved.
Redistribution and use in source and binary forms, with or without Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are modification, are permitted provided that the following conditions are

View File

@@ -266,22 +266,24 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/) * Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/) * Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf) * databene ContiPerf (org.databene:contiperf:2.2.0 - http://databene.org/contiperf)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api) * DSpace Kernel :: API and Implementation (org.dspace:dspace-api:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace JSP-UI (org.dspace:dspace-jspui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui) * DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.4 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
* DSpace OAI-PMH (org.dspace:dspace-oai:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai) * DSpace JSP-UI (org.dspace:dspace-jspui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace RDF (org.dspace:dspace-rdf:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf) * DSpace OAI-PMH (org.dspace:dspace-oai:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.0-rc4-SNAPSHOT - http://demo.dspace.org) * DSpace RDF (org.dspace:dspace-rdf:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services) * DSpace REST :: API and Implementation (org.dspace:dspace-rest:5.3-SNAPSHOT - http://demo.dspace.org)
* Apache Solr Webapp (org.dspace:dspace-solr:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr) * DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* DSpace SWORD (org.dspace:dspace-sword:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword) * Apache Solr Webapp (org.dspace:dspace-solr:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2) * DSpace SWORD (org.dspace:dspace-sword:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui) * DSpace SWORD v2 (org.dspace:dspace-swordv2:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:5.3-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.5 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)
* handle (org.dspace:handle:6.2 - no url defined) * handle (org.dspace:handle:6.2 - no url defined)
* jargon (org.dspace:jargon:1.4.25 - no url defined) * jargon (org.dspace:jargon:1.4.25 - no url defined)
* mets (org.dspace:mets:1.5.2 - no url defined) * mets (org.dspace:mets:1.5.2 - no url defined)
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined) * oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl) * Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions) * DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:5.3-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all) * Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core) * Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org) * JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
@@ -386,8 +388,3 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined) * Dough Lea's util.concurrent package (concurrent:concurrent:1.3.4 - no url defined)
* Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/) * Reflections (org.reflections:reflections:0.9.9-RC1 - http://code.google.com/p/reflections/reflections/)
* XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html) * XZ for Java (org.tukaani:xz:1.4 - http://tukaani.org/xz/java.html)
Unknown license:
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-api-lang)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:5.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/dspace-xmlui-lang)

View File

@@ -66,14 +66,12 @@ db.password=dspace
#db.username=dspace #db.username=dspace
#db.password=dspace #db.password=dspace
# Schema name - if your database contains multiple schemas, you can avoid problems with # Schema name - if your database contains multiple schemas, you can avoid
# retrieving the definitions of duplicate object names by specifying # problems with retrieving the definitions of duplicate object names by
# the schema name here that is used for DSpace by uncommenting the following entry # specifying the schema name that is used for DSpace.
# ORACLE USAGE NOTE: In Oracle, schema is equivalent to "username". This means
# NOTE: this configuration option is for PostgreSQL only. For Oracle, schema is equivalent # specifying a "db.schema" is often unnecessary (i.e. you can leave it blank),
# to user name. DSpace depends on the PostgreSQL understanding of schema. If you are using # UNLESS your Oracle DB Account (in db.username) has access to multiple schemas.
# Oracle, just leave this this value blank.
db.schema = db.schema =
# Maximum number of DB connections in pool # Maximum number of DB connections in pool

View File

@@ -12,7 +12,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -1169,10 +1169,8 @@ public class MetadataImport
*/ */
private static boolean isAuthorityControlledField(String md) private static boolean isAuthorityControlledField(String md)
{ {
int pos = md.indexOf("["); String mdf = StringUtils.substringAfter(md, ":");
String mdf = (pos > -1 ? md.substring(0, pos) : md); mdf = StringUtils.substringBefore(mdf, "[");
pos = md.indexOf(":");
mdf = (pos > -1 ? md.substring(pos+1) : md);
return authorityControlled.contains(mdf); return authorityControlled.contains(mdf);
} }

View File

@@ -11,6 +11,7 @@ import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.List; import java.util.List;
import java.util.TreeMap;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit; import org.dspace.servicemanager.DSpaceKernelInit;
@@ -275,9 +276,21 @@ public class ScriptLauncher
*/ */
private static void display() private static void display()
{ {
// List all command elements
List<Element> commands = commandConfigs.getRootElement().getChildren("command"); List<Element> commands = commandConfigs.getRootElement().getChildren("command");
System.out.println("Usage: dspace [command-name] {parameters}");
// Sort the commands by name.
// We cannot just use commands.sort() because it tries to remove and
// reinsert Elements within other Elements, and that doesn't work.
TreeMap<String, Element> sortedCommands = new TreeMap<>();
for (Element command : commands) for (Element command : commands)
{
sortedCommands.put(command.getChild("name").getValue(), command);
}
// Display the sorted list
System.out.println("Usage: dspace [command-name] {parameters}");
for (Element command : sortedCommands.values())
{ {
System.out.println(" - " + command.getChild("name").getValue() + System.out.println(" - " + command.getChild("name").getValue() +
": " + command.getChild("description").getValue()); ": " + command.getChild("description").getValue());

View File

@@ -7,9 +7,10 @@
*/ */
package org.dspace.app.mediafilter; package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.File; import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream; import java.io.InputStream;
import java.nio.file.Files;
/** /**
@@ -30,8 +31,23 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
throws Exception throws Exception
{ {
File f = inputStreamToTempFile(source, "imthumb", ".tmp"); File f = inputStreamToTempFile(source, "imthumb", ".tmp");
File f2 = getThumbnailFile(f); File f2 = null;
return new FileInputStream(f2); try
{
f2 = getThumbnailFile(f);
byte[] bytes = Files.readAllBytes(f2.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
}
} }

View File

@@ -7,18 +7,40 @@
*/ */
package org.dspace.app.mediafilter; package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.File; import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream; import java.io.InputStream;
import java.nio.file.Files;
public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter { public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
public InputStream getDestinationStream(InputStream source) public InputStream getDestinationStream(InputStream source)
throws Exception throws Exception
{ {
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf"); File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
File f2 = getImageFile(f, 0); File f2 = null;
File f3 = getThumbnailFile(f2); File f3 = null;
return new FileInputStream(f3); try
{
f2 = getImageFile(f, 0);
f3 = getThumbnailFile(f2);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
if (f3 != null)
{
//noinspection ResultOfMethodCallIgnored
f3.delete();
}
}
} }
public static final String[] PDF = {"Adobe PDF"}; public static final String[] PDF = {"Adobe PDF"};

View File

@@ -38,6 +38,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
{ {
private static int width = 180; private static int width = 180;
private static int height = 120; private static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail"; static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail"; static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern); static Pattern replaceRegex = Pattern.compile(defaultPattern);
@@ -48,6 +49,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
ProcessStarter.setGlobalSearchPath(s); ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width); width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height); height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription"); String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
if (description != null) { if (description != null) {
bitstreamDescription = description; bitstreamDescription = description;
@@ -132,6 +134,10 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
IMOperation op = new IMOperation(); IMOperation op = new IMOperation();
String s = "[" + page + "]"; String s = "[" + page + "]";
op.addImage(f.getAbsolutePath()+s); op.addImage(f.getAbsolutePath()+s);
if (flatten)
{
op.flatten();
}
op.addImage(f2.getAbsolutePath()); op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) { if (MediaFilterManager.isVerbose) {
System.out.println("IM Image Param: "+op); System.out.println("IM Image Param: "+op);

View File

@@ -151,8 +151,11 @@ public abstract class AbstractGenerator
* if an error occurs writing * if an error occurs writing
*/ */
public int finish() throws IOException public int finish() throws IOException
{
if (null != currentOutput)
{ {
closeCurrentFile(); closeCurrentFile();
}
OutputStream fo = new FileOutputStream(new File(outputDir, OutputStream fo = new FileOutputStream(new File(outputDir,
getIndexFilename())); getIndexFilename()));

View File

@@ -11,6 +11,7 @@ import java.sql.SQLException;
import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap; import com.google.common.collect.ListMultimap;
import org.dspace.authorize.AuthorizeManager; import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.*; import org.dspace.content.*;
@@ -22,6 +23,7 @@ import java.util.Enumeration;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Properties; import java.util.Properties;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
@@ -32,10 +34,10 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Set; import java.util.Set;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.handle.HandleManager; import org.dspace.handle.HandleManager;
import org.jdom.Element; import org.jdom.Element;
/** /**
@@ -1041,7 +1043,6 @@ public class GoogleMetadata
*/ */
private Bitstream findLinkableFulltext(Item item) throws SQLException { private Bitstream findLinkableFulltext(Item item) throws SQLException {
Bitstream bestSoFar = null; Bitstream bestSoFar = null;
int bitstreamCount = 0;
Bundle[] contentBundles = item.getBundles("ORIGINAL"); Bundle[] contentBundles = item.getBundles("ORIGINAL");
for (Bundle bundle : contentBundles) { for (Bundle bundle : contentBundles) {
int primaryBitstreamId = bundle.getPrimaryBitstreamID(); int primaryBitstreamId = bundle.getPrimaryBitstreamID();
@@ -1051,14 +1052,14 @@ public class GoogleMetadata
if (isPublic(candidate)) { if (isPublic(candidate)) {
return candidate; return candidate;
} }
} else if (bestSoFar == null) { } else
{
if (bestSoFar == null && isPublic(candidate)) { //if bestSoFar is null but the candidate is not public you don't use it and try to find another
bestSoFar = candidate; bestSoFar = candidate;
} }
bitstreamCount++;
} }
} }
if (bitstreamCount > 1 || !isPublic(bestSoFar)) {
bestSoFar = null;
} }
return bestSoFar; return bestSoFar;

View File

@@ -20,6 +20,7 @@ import org.dspace.eperson.Group;
import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow; import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator; import org.dspace.storage.rdbms.TableRowIterator;
import org.dspace.workflow.WorkflowItem;
/** /**
* AuthorizeManager handles all authorization checks for DSpace. For better * AuthorizeManager handles all authorization checks for DSpace. For better
@@ -295,8 +296,43 @@ public class AuthorizeManager
} }
} }
// In case the dso is an bundle or bitstream we must ignore custom
// policies if it does not belong to at least one installed item (see
// DS-2614).
// In case the dso is an item and a corresponding workspace or workflow
// item exist, we have to ignore custom policies (see DS-2614).
boolean ignoreCustomPolicies = false;
if (o instanceof Bitstream)
{
Bitstream b = (Bitstream) o;
// Ensure that this is not a collection or community logo
DSpaceObject parent = b.getParentObject();
if (!(parent instanceof Collection) && !(parent instanceof Community))
{
ignoreCustomPolicies = !isAnyItemInstalled(c, b.getBundles());
}
}
if (o instanceof Bundle)
{
ignoreCustomPolicies = !isAnyItemInstalled(c, new Bundle[] {(Bundle) o});
}
if (o instanceof Item)
{
if (WorkspaceItem.findByItem(c, (Item) o) != null ||
WorkflowItem.findByItem(c, (Item) o) != null)
{
ignoreCustomPolicies = true;
}
}
for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action)) for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action))
{ {
if (ignoreCustomPolicies
&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
{
continue;
}
// check policies for date validity // check policies for date validity
if (rp.isDateValid()) if (rp.isDateValid())
{ {
@@ -319,6 +355,25 @@ public class AuthorizeManager
return false; return false;
} }
// check whether any bundle belongs to any item that passed submission
// and workflow process
protected static boolean isAnyItemInstalled(Context ctx, Bundle[] bundles)
throws SQLException
{
for (Bundle bundle : bundles)
{
for (Item item : bundle.getItems())
{
if (WorkspaceItem.findByItem(ctx, item) == null
&& WorkflowItem.findByItem(ctx, item) == null)
{
return true;
}
}
}
return false;
}
/////////////////////////////////////////////// ///////////////////////////////////////////////
// admin check methods // admin check methods
/////////////////////////////////////////////// ///////////////////////////////////////////////
@@ -837,7 +892,7 @@ public class AuthorizeManager
throws SQLException throws SQLException
{ {
DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE " DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
+ "resource_type_id= ? AND resource_id= ? AND rptype <> ? ", + "resource_type_id= ? AND resource_id= ? AND (rptype <> ? OR rptype IS NULL)",
o.getType(), o.getID(), type); o.getType(), o.getID(), type);
} }

View File

@@ -354,7 +354,7 @@ public class BrowserScope
*/ */
public void setResultsPerPage(int resultsPerPage) public void setResultsPerPage(int resultsPerPage)
{ {
if (resultsPerPage > -1 || browseIndex.isTagCloudEnabled()) if (resultsPerPage > -1 || (browseIndex != null && browseIndex.isTagCloudEnabled()))
{ {
this.resultsPerPage = resultsPerPage; this.resultsPerPage = resultsPerPage;
} }

View File

@@ -32,6 +32,8 @@ import org.dspace.utils.DSpace;
* *
* @author Andrea Bollini (CILEA) * @author Andrea Bollini (CILEA)
* @author Adán Román Ruiz at arvo.es (bugfix) * @author Adán Román Ruiz at arvo.es (bugfix)
* @author Panagiotis Koutsourakis (National Documentation Centre) (bugfix)
* @author Kostas Stamatis (National Documentation Centre) (bugfix)
* *
*/ */
public class SolrBrowseDAO implements BrowseDAO public class SolrBrowseDAO implements BrowseDAO
@@ -336,6 +338,22 @@ public class SolrBrowseDAO implements BrowseDAO
addStatusFilter(query); addStatusFilter(query);
query.setMaxResults(0); query.setMaxResults(0);
query.addFilterQueries("search.resourcetype:" + Constants.ITEM); query.addFilterQueries("search.resourcetype:" + Constants.ITEM);
// We need to take into account the fact that we may be in a subset of the items
if (authority != null)
{
query.addFilterQueries("{!field f="+facetField + "_authority_filter}"
+ authority);
}
else if (this.value != null && !valuePartial)
{
query.addFilterQueries("{!field f="+facetField + "_value_filter}" + this.value);
}
else if (valuePartial)
{
query.addFilterQueries("{!field f="+facetField + "_partial}" + this.value);
}
if (isAscending) if (isAscending)
{ {
query.setQuery("bi_"+column + "_sort" + ": [* TO \"" + value + "\"}"); query.setQuery("bi_"+column + "_sort" + ": [* TO \"" + value + "\"}");
@@ -343,6 +361,7 @@ public class SolrBrowseDAO implements BrowseDAO
else else
{ {
query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]"); query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]");
query.addFilterQueries("-(bi_" + column + "_sort" + ":" + value + "*)");
} }
boolean includeUnDiscoverable = itemsWithdrawn || !itemsDiscoverable; boolean includeUnDiscoverable = itemsWithdrawn || !itemsDiscoverable;
DiscoverResult resp = null; DiscoverResult resp = null;

View File

@@ -28,6 +28,7 @@ import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole; import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import java.io.Serializable;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.PreparedStatement; import java.sql.PreparedStatement;
@@ -294,31 +295,48 @@ public class Collection extends DSpaceObject
* @return the collections in the system * @return the collections in the system
* @throws SQLException * @throws SQLException
*/ */
public static Collection[] findAll(Context context) throws SQLException { public static Collection[] findAll(Context context) throws SQLException
{
TableRowIterator tri = null; TableRowIterator tri = null;
try { List<Collection> collections = null;
String query = "SELECT c.* FROM collection c " + List<Serializable> params = new ArrayList<Serializable>();
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) "; StringBuffer query = new StringBuffer(
if(DatabaseManager.isOracle()){ "SELECT c.*" +
query += " ORDER BY cast(m.text_value as varchar2(128))"; "FROM collection c " +
}else{ "LEFT JOIN metadatavalue m ON (" +
query += " ORDER BY m.text_value"; "m.resource_id = c.collection_id AND " +
} "m.resource_type_id = ? AND " +
"m.metadata_field_id = ?" +
tri = DatabaseManager.query(context, ")"
query,
Constants.COLLECTION,
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID()
); );
} catch (SQLException e) {
log.error("Find all Collections - ",e); if (DatabaseManager.isOracle())
throw e; {
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
}
else
{
query.append(" ORDER BY m.text_value");
} }
List<Collection> collections = new ArrayList<Collection>(); params.add(Constants.COLLECTION);
params.add(
MetadataField.findByElement(
context,
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
"title",
null
).getFieldID()
);
try try
{ {
tri = DatabaseManager.query(
context, query.toString(), params.toArray()
);
collections = new ArrayList<Collection>();
while (tri.hasNext()) while (tri.hasNext())
{ {
TableRow row = tri.next(); TableRow row = tri.next();
@@ -337,6 +355,11 @@ public class Collection extends DSpaceObject
} }
} }
} }
catch (SQLException e)
{
log.error("Find all Collections - ", e);
throw e;
}
finally finally
{ {
// close the TableRowIterator to free up resources // close the TableRowIterator to free up resources
@@ -363,31 +386,47 @@ public class Collection extends DSpaceObject
public static Collection[] findAll(Context context, Integer limit, Integer offset) throws SQLException public static Collection[] findAll(Context context, Integer limit, Integer offset) throws SQLException
{ {
TableRowIterator tri = null; TableRowIterator tri = null;
try{ List<Collection> collections = null;
String query = "SELECT c.* FROM collection c " + List<Serializable> params = new ArrayList<Serializable>();
"LEFT JOIN metadatavalue m on (m.resource_id = c.collection_id and m.resource_type_id = ? and m.metadata_field_id = ?) "; StringBuffer query = new StringBuffer(
"SELECT c.*" +
if(DatabaseManager.isOracle()){ "FROM collection c " +
query += " ORDER BY cast(m.text_value as varchar2(128))"; "LEFT JOIN metadatavalue m ON (" +
}else{ "m.resource_id = c.collection_id AND " +
query += " ORDER BY m.text_value"; "m.resource_type_id = ? AND " +
} "m.metadata_field_id = ?" +
query += " limit ? offset ?"; ")"
tri = DatabaseManager.query(context,
query,
Constants.COLLECTION,
MetadataField.findByElement(context, MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(), "title", null).getFieldID(),
limit,
offset
); );
} catch (SQLException e) {
log.error("Find all Collections offset/limit - ",e); if (DatabaseManager.isOracle())
throw e; {
query.append(" ORDER BY cast(m.text_value as varchar2(128))");
} }
List<Collection> collections = new ArrayList<Collection>(); else
{
query.append(" ORDER BY m.text_value");
}
params.add(Constants.COLLECTION);
params.add(
MetadataField.findByElement(
context,
MetadataSchema.find(context, MetadataSchema.DC_SCHEMA).getSchemaID(),
"title",
null
).getFieldID()
);
DatabaseManager.applyOffsetAndLimit(query, params, offset, limit);
try try
{ {
tri = DatabaseManager.query(
context, query.toString(), params.toArray()
);
collections = new ArrayList<Collection>();
while (tri.hasNext()) while (tri.hasNext())
{ {
TableRow row = tri.next(); TableRow row = tri.next();
@@ -406,6 +445,11 @@ public class Collection extends DSpaceObject
} }
} }
} }
catch (SQLException e)
{
log.error("Find all Collections offset/limit - ", e);
throw e;
}
finally finally
{ {
// close the TableRowIterator to free up resources // close the TableRowIterator to free up resources
@@ -450,13 +494,20 @@ public class Collection extends DSpaceObject
*/ */
public ItemIterator getItems(Integer limit, Integer offset) throws SQLException public ItemIterator getItems(Integer limit, Integer offset) throws SQLException
{ {
String myQuery = "SELECT item.* FROM item, collection2item WHERE " List<Serializable> params = new ArrayList<Serializable>();
+ "item.item_id=collection2item.item_id AND " StringBuffer myQuery = new StringBuffer(
+ "collection2item.collection_id= ? " "SELECT item.* " +
+ "AND item.in_archive='1' limit ? offset ?"; "FROM item, collection2item " +
"WHERE item.item_id = collection2item.item_id " +
"AND collection2item.collection_id = ? " +
"AND item.in_archive = '1'"
);
TableRowIterator rows = DatabaseManager.queryTable(ourContext, "item", params.add(getID());
myQuery,getID(), limit, offset); DatabaseManager.applyOffsetAndLimit(myQuery, params, offset, limit);
TableRowIterator rows = DatabaseManager.query(ourContext,
myQuery.toString(), params.toArray());
return new ItemIterator(ourContext, rows); return new ItemIterator(ourContext, rows);
} }
@@ -1513,7 +1564,7 @@ public class Collection extends DSpaceObject
public static Collection[] findAuthorizedOptimized(Context context, int actionID) throws java.sql.SQLException public static Collection[] findAuthorizedOptimized(Context context, int actionID) throws java.sql.SQLException
{ {
if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", true)) { if(! ConfigurationManager.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false)) {
// Fallback to legacy query if config says so. The rationale could be that a site found a bug. // Fallback to legacy query if config says so. The rationale could be that a site found a bug.
return findAuthorized(context, null, actionID); return findAuthorized(context, null, actionID);
} }

View File

@@ -21,15 +21,7 @@ import java.util.zip.ZipFile;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.*;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.crosswalk.CrosswalkException; import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.MetadataValidationException; import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
@@ -37,6 +29,8 @@ import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogManager; import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager; import org.dspace.handle.HandleManager;
import org.dspace.workflow.WorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.jdom.Element; import org.jdom.Element;
/** /**
@@ -660,8 +654,24 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester
addBitstreams(context, item, manifest, pkgFile, params, callback); addBitstreams(context, item, manifest, pkgFile, params, callback);
// have subclass manage license since it may be extra package file. // have subclass manage license since it may be extra package file.
addLicense(context, item, license, (Collection) dso Collection owningCollection = (Collection) dso.getParentObject();
.getParentObject(), params); if(owningCollection == null)
{
//We are probably dealing with an item that isn't archived yet
InProgressSubmission inProgressSubmission = WorkspaceItem.findByItem(context, item);
if(inProgressSubmission == null)
{
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow"))
{
inProgressSubmission = XmlWorkflowItem.findByItem(context, item);
}else{
inProgressSubmission = WorkflowItem.findByItem(context, item);
}
}
owningCollection = inProgressSubmission.getCollection();
}
addLicense(context, item, license, owningCollection, params);
// FIXME ? // FIXME ?
// should set lastModifiedTime e.g. when ingesting AIP. // should set lastModifiedTime e.g. when ingesting AIP.

View File

@@ -474,8 +474,18 @@ public class Email
System.out.println(" - To: " + to); System.out.println(" - To: " + to);
System.out.println(" - Subject: " + subject); System.out.println(" - Subject: " + subject);
System.out.println(" - Server: " + server); System.out.println(" - Server: " + server);
boolean disabled = ConfigurationManager.getBooleanProperty("mail.server.disabled", false);
try try
{ {
if( disabled)
{
System.err.println("\nError sending email:");
System.err.println(" - Error: cannot test email because mail.server.disabled is set to true");
System.err.println("\nPlease see the DSpace documentation for assistance.\n");
System.err.println("\n");
System.exit(1);
return;
}
e.send(); e.send();
} }
catch (MessagingException me) catch (MessagingException me)

View File

@@ -113,4 +113,11 @@ public interface SearchService {
* @return the indexed field * @return the indexed field
*/ */
String toSortFieldIndex(String metadataField, String type); String toSortFieldIndex(String metadataField, String type);
/**
* Utility method to escape any special characters in a user's query
* @param query
* @return query with any special characters escaped
*/
String escapeQueryChars(String query);
} }

View File

@@ -2320,4 +2320,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
throw new SearchServiceException(e.getMessage(), e); throw new SearchServiceException(e.getMessage(), e);
} }
} }
@Override
public String escapeQueryChars(String query) {
// Use Solr's built in query escape tool
// WARNING: You should only escape characters from user entered queries,
// otherwise you may accidentally BREAK field-based queries (which often
// rely on special characters to separate the field from the query value)
return ClientUtils.escapeQueryChars(query);
}
} }

View File

@@ -9,89 +9,167 @@ package org.dspace.handle;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow; import org.dspace.storage.rdbms.TableRow;
import org.dspace.search.DSIndexer; import org.dspace.discovery.IndexClient;
import org.dspace.browse.IndexBrowse;
/** /**
* A script to update the handle values in the database. This is typically used * A script to update the handle values in the database. This is typically used
* when moving from a test machine (handle = 123456789) to a production service. * when moving from a test machine (handle = 123456789) to a production service
* or when make a test clone from production service.
* *
* @author Stuart Lewis * @author Stuart Lewis
* @author Ivo Prajer (Czech Technical University in Prague)
*/ */
public class UpdateHandlePrefix public class UpdateHandlePrefix
{ {
private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);
/**
* When invoked as a command-line tool, updates handle prefix
*
* @param args the command-line arguments, none used
* @throws java.lang.Exception
*
*/
public static void main(String[] args) throws Exception public static void main(String[] args) throws Exception
{ {
// There should be two paramters // There should be two parameters
if (args.length < 2) if (args.length < 2)
{ {
System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n"); System.out.println("\nUsage: update-handle-prefix <old handle> <new handle>\n");
System.exit(1);
} }
else else
{ {
// Confirm with the user that this is what they want to do
String oldH = args[0]; String oldH = args[0];
String newH = args[1]; String newH = args[1];
BufferedReader input = new BufferedReader(new InputStreamReader(System.in)); // Get info about changes
System.out.println("\nGetting information about handles from database...");
Context context = new Context(); Context context = new Context();
System.out.println("If you continue, all handles in your repository with prefix " + String sql = "SELECT count(*) as count " +
oldH + " will be updated to have handle prefix " + newH + "\n"); "FROM handle " +
String sql = "SELECT count(*) as count FROM handle " +
"WHERE handle LIKE '" + oldH + "%'"; "WHERE handle LIKE '" + oldH + "%'";
TableRow row = DatabaseManager.querySingle(context, sql, new Object[] {}); TableRow row = DatabaseManager.querySingle(context, sql, new Object[] {});
long count = row.getLongColumn("count"); long count = row.getLongColumn("count");
System.out.println(count + " items will be updated.\n");
System.out.print("Have you taken a backup, and are you ready to continue? [y/n]: "); if (count > 0)
{
// Print info text about changes
System.out.println(
"In your repository will be updated " + count + " handle" +
((count > 1) ? "s" : "") + " to new prefix " + newH +
" from original " + oldH + "!\n"
);
// Confirm with the user that this is what they want to do
System.out.print(
"Servlet container (e.g. Apache Tomcat, Jetty, Caucho Resin) must be running.\n" +
"If it is necessary, please make a backup of the database.\n" +
"Are you ready to continue? [y/n]: "
);
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
String choiceString = input.readLine(); String choiceString = input.readLine();
if (choiceString.equalsIgnoreCase("y")) if (choiceString.equalsIgnoreCase("y"))
{ {
try {
log.info("Updating handle prefix from " + oldH + " to " + newH);
// Make the changes // Make the changes
System.out.print("Updating handle table... "); System.out.print("\nUpdating handle table... ");
sql = "update handle set handle = '" + newH + "' || '/' || handle_id " + sql = "UPDATE handle " +
"where handle like '" + oldH + "/%'"; "SET handle = '" + newH + "' || '/' || handle_id " +
int updated = DatabaseManager.updateQuery(context, sql, new Object[] {}); "WHERE handle like '" + oldH + "/%'";
System.out.println(updated + " items updated"); int updHdl = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(
updHdl + " item" + ((updHdl > 1) ? "s" : "") + " updated"
);
System.out.print("Updating metadatavalues table... "); System.out.print("Updating metadatavalues table... ");
sql = "UPDATE metadatavalue SET text_value= (SELECT 'http://hdl.handle.net/' || " + sql = "UPDATE metadatavalue " +
"handle FROM handle WHERE handle.resource_id=item_id AND " + "SET text_value = " +
"handle.resource_type_id=2) WHERE text_value LIKE 'http://hdl.handle.net/%';"; "(" +
updated = DatabaseManager.updateQuery(context, sql, new Object[] {}); "SELECT 'http://hdl.handle.net/' || handle " +
System.out.println(updated + " metadata values updated"); "FROM handle " +
"WHERE handle.resource_id = metadatavalue.resource_id " +
"AND handle.resource_type_id = 2" +
") " +
"WHERE text_value LIKE 'http://hdl.handle.net/" + oldH + "/%'" +
"AND EXISTS " +
"(" +
"SELECT 1 " +
"FROM handle " +
"WHERE handle.resource_id = metadatavalue.resource_id " +
"AND handle.resource_type_id = 2" +
")";
int updMeta = DatabaseManager.updateQuery(context, sql, new Object[] {});
System.out.println(
updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
);
// Commit the changes // Commit the changes
context.complete(); context.complete();
System.out.print("Re-creating browse and search indexes... "); log.info(
"Done with updating handle prefix. " +
"It was changed " + updHdl + " handle" + ((updHdl > 1) ? "s" : "") +
" and " + updMeta + " metadata record" + ((updMeta > 1) ? "s" : "")
);
// Reinitialise the browse system }
IndexBrowse.main(new String[] {"-i"}); catch (SQLException sqle)
{
if ((context != null) && (context.isValid()))
{
context.abort();
context = null;
}
System.out.println("\nError during SQL operations.");
throw sqle;
}
System.out.println("Handles successfully updated in database.\n");
System.out.println("Re-creating browse and search indexes...");
// Reinitialise the browse system
try try
{ {
DSIndexer.main(new String[0]); // Reinitialise the search and browse system
IndexClient.main(new String[] {"-b"});
System.out.println("Browse and search indexes are ready now.");
// All done
System.out.println("\nAll done successfully. Please check the DSpace logs!\n");
} }
catch (Exception e) catch (Exception e)
{ {
// Not a lot we can do // Not a lot we can do
System.out.println("Error re-indexing:"); System.out.println("Error during re-indexing.");
e.printStackTrace(); System.out.println(
System.out.println("\nPlease manually run [dspace]/bin/index-all"); "\n\nAutomatic re-indexing failed. Please perform it manually.\n" +
"You should run one of the following commands:\n\n" +
" [dspace]/bin/dspace index-discovery -b\n\n" +
"If you are using Solr for browse (this is the default setting).\n" +
"When launching this command, your servlet container must be running.\n\n" +
" [dspace]/bin/dspace index-lucene-init\n\n" +
"If you enabled Lucene for search.\n" +
"When launching this command, your servlet container must be shutdown.\n"
);
throw e;
} }
// All done
System.out.println("\nHandles successfully updated.");
} }
else else
{ {
System.out.println("No changes have been made to your data."); System.out.println("No changes have been made to your data.\n");
}
}
else
{
System.out.println("Nothing to do! All handles are up-to-date.\n");
} }
} }
} }

View File

@@ -561,7 +561,7 @@ public class EZIDIdentifierProvider
/** /**
* Map selected DSpace metadata to fields recognized by DataCite. * Map selected DSpace metadata to fields recognized by DataCite.
*/ */
private Map<String, String> crosswalkMetadata(DSpaceObject dso) Map<String, String> crosswalkMetadata(DSpaceObject dso)
{ {
if ((null == dso) || !(dso instanceof Item)) if ((null == dso) || !(dso instanceof Item))
{ {
@@ -632,18 +632,42 @@ public class EZIDIdentifierProvider
mapped.put(DATACITE_PUBLICATION_YEAR, year); mapped.put(DATACITE_PUBLICATION_YEAR, year);
} }
// TODO find a way to get a current direct URL to the object and set _target // Supply _target link back to this object
// mapped.put("_target", url); String handle = dso.getHandle();
if (null == handle)
{
log.warn("{} #{} has no handle -- location not set.",
dso.getTypeText(), dso.getID());
}
else
{
String url = configurationService.getProperty("dspace.url")
+ "/handle/" + item.getHandle();
log.info("Supplying location: {}", url);
mapped.put("_target", url);
}
return mapped; return mapped;
} }
/**
* Provide a map from DSO metadata keys to EZID keys. This will drive the
* generation of EZID metadata for the minting of new identifiers.
*
* @param aCrosswalk
*/
@Required @Required
public void setCrosswalk(Map<String, String> aCrosswalk) public void setCrosswalk(Map<String, String> aCrosswalk)
{ {
crosswalk = aCrosswalk; crosswalk = aCrosswalk;
} }
/**
* Provide a map from DSO metadata keys to classes which can transform their
* values to something acceptable to EZID.
*
* @param transformMap
*/
public void setCrosswalkTransform(Map<String, Transform> transformMap) public void setCrosswalkTransform(Map<String, Transform> transformMap)
{ {
transforms = transformMap; transforms = transformMap;

View File

@@ -20,6 +20,7 @@ import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Site; import org.dspace.content.Site;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.event.Consumer; import org.dspace.event.Consumer;
@@ -52,6 +53,8 @@ public class RDFConsumer implements Consumer
} }
int sType = event.getSubjectType(); int sType = event.getSubjectType();
log.debug(event.getEventTypeAsString() + " for "
+ event.getSubjectTypeAsString() + ":" + event.getSubjectID());
switch (sType) switch (sType)
{ {
case (Constants.BITSTREAM) : case (Constants.BITSTREAM) :
@@ -100,7 +103,7 @@ public class RDFConsumer implements Consumer
Bitstream bitstream = Bitstream.find(ctx, event.getSubjectID()); Bitstream bitstream = Bitstream.find(ctx, event.getSubjectID());
if (bitstream == null) if (bitstream == null)
{ {
log.warn("Cannot find bitstream " + event.getSubjectID() + "! " log.debug("Cannot find bitstream " + event.getSubjectID() + "! "
+ "Ignoring, as it is likely it was deleted " + "Ignoring, as it is likely it was deleted "
+ "and we'll cover it by a REMOVE event on its bundle."); + "and we'll cover it by a REMOVE event on its bundle.");
return; return;
@@ -111,6 +114,11 @@ public class RDFConsumer implements Consumer
Item[] items = b.getItems(); Item[] items = b.getItems();
for (Item i : items) for (Item i : items)
{ {
if (WorkspaceItem.findByItem(ctx, i) != null)
{
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
continue;
}
DSOIdentifier id = new DSOIdentifier(i, ctx); DSOIdentifier id = new DSOIdentifier(i, ctx);
if (!this.toDelete.contains(id) && !this.toConvert.contains(id)) if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
{ {
@@ -148,7 +156,7 @@ public class RDFConsumer implements Consumer
Bundle bundle = Bundle.find(ctx, event.getSubjectID()); Bundle bundle = Bundle.find(ctx, event.getSubjectID());
if (bundle == null) if (bundle == null)
{ {
log.warn("Cannot find bundle " + event.getSubjectID() + "! " log.debug("Cannot find bundle " + event.getSubjectID() + "! "
+ "Ignoring, as it is likely it was deleted " + "Ignoring, as it is likely it was deleted "
+ "and we'll cover it by a REMOVE event on its item."); + "and we'll cover it by a REMOVE event on its item.");
return; return;
@@ -156,6 +164,11 @@ public class RDFConsumer implements Consumer
Item[] items = bundle.getItems(); Item[] items = bundle.getItems();
for (Item i : items) for (Item i : items)
{ {
if (WorkspaceItem.findByItem(ctx, i) != null)
{
log.debug("Ignoring Item " + i.getID() + " as a corresponding workspace item exists.");
continue;
}
DSOIdentifier id = new DSOIdentifier(i, ctx); DSOIdentifier id = new DSOIdentifier(i, ctx);
if (!this.toDelete.contains(id) && !this.toConvert.contains(id)) if (!this.toDelete.contains(id) && !this.toConvert.contains(id))
{ {
@@ -216,14 +229,24 @@ public class RDFConsumer implements Consumer
DSpaceObject dso = event.getSubject(ctx); DSpaceObject dso = event.getSubject(ctx);
if (dso == null) if (dso == null)
{ {
log.warn("Cannot find " + event.getSubjectTypeAsString() + " " log.debug("Cannot find " + event.getSubjectTypeAsString() + " "
+ event.getSubjectID() + "! " + "Ignoring, as it is " + event.getSubjectID() + "! " + "Ignoring, as it is "
+ "likely it was deleted and we'll cover it by another " + "likely it was deleted and we'll cover it by another "
+ "event with the type REMOVE."); + "event with the type REMOVE.");
return; return;
} }
DSOIdentifier id = new DSOIdentifier(dso, ctx);
// ignore unfinished submissions here. Every unfinished submission
// has an workspace item. The item flag "in_archive" doesn't help us
// here as this is also set to false if a newer version was submitted.
if (dso instanceof Item
&& WorkspaceItem.findByItem(ctx, (Item) dso) != null)
{
log.debug("Ignoring Item " + dso.getID() + " as a corresponding workspace item exists.");
return;
}
DSOIdentifier id = new DSOIdentifier(dso, ctx);
// If an item gets withdrawn, a MODIFIY event is fired. We have to // If an item gets withdrawn, a MODIFIY event is fired. We have to
// delete the item from the triple store instead of converting it. // delete the item from the triple store instead of converting it.
// we don't have to take care for reinstatements of items as they can // we don't have to take care for reinstatements of items as they can

View File

@@ -331,6 +331,7 @@ public class SolrLogger
{ {
doc1.addField("userAgent", request.getHeader("User-Agent")); doc1.addField("userAgent", request.getHeader("User-Agent"));
} }
doc1.addField("isBot",isSpiderBot);
// Save the location information if valid, save the event without // Save the location information if valid, save the event without
// location information if not valid // location information if not valid
if(locationService != null) if(locationService != null)
@@ -354,7 +355,7 @@ public class SolrLogger
doc1.addField("city", location.city); doc1.addField("city", location.city);
doc1.addField("latitude", location.latitude); doc1.addField("latitude", location.latitude);
doc1.addField("longitude", location.longitude); doc1.addField("longitude", location.longitude);
doc1.addField("isBot",isSpiderBot);
} }
@@ -416,6 +417,7 @@ public class SolrLogger
{ {
doc1.addField("userAgent", userAgent); doc1.addField("userAgent", userAgent);
} }
doc1.addField("isBot",isSpiderBot);
// Save the location information if valid, save the event without // Save the location information if valid, save the event without
// location information if not valid // location information if not valid
if(locationService != null) if(locationService != null)
@@ -439,7 +441,7 @@ public class SolrLogger
doc1.addField("city", location.city); doc1.addField("city", location.city);
doc1.addField("latitude", location.latitude); doc1.addField("latitude", location.latitude);
doc1.addField("longitude", location.longitude); doc1.addField("longitude", location.longitude);
doc1.addField("isBot",isSpiderBot);
} }
@@ -1338,6 +1340,7 @@ public class SolrLogger
//Upload the data in the csv files to our new solr core //Upload the data in the csv files to our new solr core
ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv"); ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8"); contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
contentStreamUpdateRequest.setParam("skip", "_version_");
contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8"); contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

View File

@@ -15,6 +15,7 @@ import java.util.ArrayList;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.Collections;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
@@ -42,10 +43,10 @@ public class SpiderDetector {
private static IPTable table = null; private static IPTable table = null;
/** Collection of regular expressions to match known spiders' agents. */ /** Collection of regular expressions to match known spiders' agents. */
private static List<Pattern> agents = new ArrayList<Pattern>(); private static List<Pattern> agents = Collections.synchronizedList(new ArrayList<Pattern>());
/** Collection of regular expressions to match known spiders' domain names. */ /** Collection of regular expressions to match known spiders' domain names. */
private static List<Pattern> domains = new ArrayList<Pattern>(); private static List<Pattern> domains = Collections.synchronizedList(new ArrayList<Pattern>());
/** /**
* Utility method which reads lines from a file & returns them in a Set. * Utility method which reads lines from a file & returns them in a Set.
@@ -199,10 +200,12 @@ public class SpiderDetector {
{ {
// See if any agent patterns match // See if any agent patterns match
if (null != agent) if (null != agent)
{
synchronized(agents)
{ {
if (agents.isEmpty()) if (agents.isEmpty())
loadPatterns("agents", agents); loadPatterns("agents", agents);
}
for (Pattern candidate : agents) for (Pattern candidate : agents)
{ {
// prevent matcher() invocation from a null Pattern object // prevent matcher() invocation from a null Pattern object
@@ -230,11 +233,11 @@ public class SpiderDetector {
// No. See if any DNS names match // No. See if any DNS names match
if (null != hostname) if (null != hostname)
{ {
if (domains.isEmpty()) synchronized(domains)
{ {
if (domains.isEmpty())
loadPatterns("domains", domains); loadPatterns("domains", domains);
} }
for (Pattern candidate : domains) for (Pattern candidate : domains)
{ {
// prevent matcher() invocation from a null Pattern object // prevent matcher() invocation from a null Pattern object

View File

@@ -15,7 +15,6 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.ResultSetMetaData; import java.sql.ResultSetMetaData;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement; import java.sql.Statement;
import java.sql.Time; import java.sql.Time;
import java.sql.Timestamp; import java.sql.Timestamp;
@@ -35,8 +34,6 @@ import javax.sql.DataSource;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.MigrationInfo;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -881,22 +878,22 @@ public class DatabaseManager
} }
/** /**
* Return the canonical name for a table. * Return the canonical name for a database object.
* *
* @param table * @param db_object
* The name of the table. * The name of the database object.
* @return The canonical name of the table. * @return The canonical name of the database object.
*/ */
static String canonicalize(String table) static String canonicalize(String db_object)
{ {
// Oracle expects upper-case table names // Oracle expects upper-case table names, schemas, etc.
if (isOracle) if (isOracle)
{ {
return (table == null) ? null : table.toUpperCase(); return (db_object == null) ? null : db_object.toUpperCase();
} }
// default database postgres wants lower-case table names // default database postgres wants lower-case table names
return (table == null) ? null : table.toLowerCase(); return (db_object == null) ? null : db_object.toLowerCase();
} }
//////////////////////////////////////// ////////////////////////////////////////
@@ -1237,10 +1234,6 @@ public class DatabaseManager
try try
{ {
String schema = ConfigurationManager.getProperty("db.schema");
if(StringUtils.isBlank(schema)){
schema = null;
}
String catalog = null; String catalog = null;
int dotIndex = table.indexOf('.'); int dotIndex = table.indexOf('.');
@@ -1254,6 +1247,9 @@ public class DatabaseManager
connection = getConnection(); connection = getConnection();
// Get current database schema name
String schema = DatabaseUtils.getSchemaName(connection);
DatabaseMetaData metadata = connection.getMetaData(); DatabaseMetaData metadata = connection.getMetaData();
Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>(); Map<String, ColumnInfo> results = new HashMap<String, ColumnInfo>();

View File

@@ -873,8 +873,10 @@ public class DatabaseUtils
* Get the Database Schema Name in use by this Connection, so that it can * Get the Database Schema Name in use by this Connection, so that it can
* be used to limit queries in other methods (e.g. tableExists()). * be used to limit queries in other methods (e.g. tableExists()).
* <P> * <P>
* For PostgreSQL, schema is simply what is configured in db.schema or "public" * NOTE: Once we upgrade to using Apache Commons DBCP / Pool version 2.0,
* For Oracle, schema is actually the database *USER* or owner. * this method WILL BE REMOVED in favor of java.sql.Connection's new
* "getSchema()" method.
* http://docs.oracle.com/javase/7/docs/api/java/sql/Connection.html#getSchema()
* *
* @param connection * @param connection
* Current Database Connection * Current Database Connection
@@ -886,27 +888,29 @@ public class DatabaseUtils
String schema = null; String schema = null;
DatabaseMetaData meta = connection.getMetaData(); DatabaseMetaData meta = connection.getMetaData();
// Determine our DB type // Check the configured "db.schema" FIRST for the value configured there
schema = DatabaseManager.canonicalize(ConfigurationManager.getProperty("db.schema"));
// If unspecified, determine "sane" defaults based on DB type
if(StringUtils.isBlank(schema))
{
String dbType = DatabaseManager.findDbKeyword(meta); String dbType = DatabaseManager.findDbKeyword(meta);
if(dbType.equals(DatabaseManager.DBMS_POSTGRES)) if(dbType.equals(DatabaseManager.DBMS_POSTGRES))
{ {
// Get the schema name from "db.schema" // For PostgreSQL, the default schema is named "public"
schema = ConfigurationManager.getProperty("db.schema"); // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html
// If unspecified, default schema is "public"
if(StringUtils.isBlank(schema)){
schema = "public"; schema = "public";
} }
}
else if (dbType.equals(DatabaseManager.DBMS_ORACLE)) else if (dbType.equals(DatabaseManager.DBMS_ORACLE))
{ {
// Schema is actually the user account // For Oracle, default schema is actually the user account
// See: http://stackoverflow.com/a/13341390 // See: http://stackoverflow.com/a/13341390
schema = meta.getUserName(); schema = meta.getUserName();
} }
else else
schema = null; schema = null;
}
return schema; return schema;
} }

View File

@@ -23,18 +23,17 @@ import org.apache.commons.lang.StringUtils;
public class MigrationUtils public class MigrationUtils
{ {
/** /**
* Drop a given Database Constraint (based on the current database type). * Drop a given Database Column Constraint (based on the current database type).
* Returns a "checksum" for this migration which can be used as part of * Returns a "checksum" for this migration which can be used as part of
* a Flyway Java migration * a Flyway Java migration
* *
* @param connection the current Database connection * @param connection the current Database connection
* @param tableName the name of the table the constraint applies to * @param tableName the name of the table the constraint applies to
* @param columnName the name of the column the constraint applies to * @param columnName the name of the column the constraint applies to
* @param constraintSuffix Only used for PostgreSQL, whose constraint naming convention depends on a suffix (key, fkey, etc)
* @return migration checksum as an Integer * @return migration checksum as an Integer
* @throws SQLException if a database error occurs * @throws SQLException if a database error occurs
*/ */
public static Integer dropDBConstraint(Connection connection, String tableName, String columnName, String constraintSuffix) public static Integer dropDBConstraint(Connection connection, String tableName, String columnName)
throws SQLException throws SQLException
{ {
Integer checksum = -1; Integer checksum = -1;
@@ -48,13 +47,17 @@ public class MigrationUtils
String dbtype = DatabaseManager.findDbKeyword(meta); String dbtype = DatabaseManager.findDbKeyword(meta);
String constraintName = null; String constraintName = null;
String constraintNameSQL = null; String constraintNameSQL = null;
String schemaName = null;
switch(dbtype) switch(dbtype)
{ {
case DatabaseManager.DBMS_POSTGRES: case DatabaseManager.DBMS_POSTGRES:
// In Postgres, constraints are always named: // In Postgres, column constraints are listed in the "information_schema.key_column_usage" view
// {tablename}_{columnname(s)}_{suffix} // See: http://www.postgresql.org/docs/9.4/static/infoschema-key-column-usage.html
// see: http://stackoverflow.com/a/4108266/3750035 constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " +
constraintName = StringUtils.lowerCase(tableName) + "_" + StringUtils.lowerCase(columnName) + "_" + StringUtils.lowerCase(constraintSuffix); "FROM information_schema.key_column_usage " +
"WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ?";
// For Postgres, we need to limit by the schema as well
schemaName = DatabaseUtils.getSchemaName(connection);
break; break;
case DatabaseManager.DBMS_ORACLE: case DatabaseManager.DBMS_ORACLE:
// In Oracle, constraints are listed in the USER_CONS_COLUMNS table // In Oracle, constraints are listed in the USER_CONS_COLUMNS table
@@ -72,13 +75,15 @@ public class MigrationUtils
throw new SQLException("DBMS " + dbtype + " is unsupported in this migration."); throw new SQLException("DBMS " + dbtype + " is unsupported in this migration.");
} }
// If we have a SQL query to run for the constraint name, then run it
if (constraintNameSQL!=null)
{
// Run the query to obtain the constraint name, passing it the parameters // Run the query to obtain the constraint name, passing it the parameters
PreparedStatement statement = connection.prepareStatement(constraintNameSQL); PreparedStatement statement = connection.prepareStatement(constraintNameSQL);
statement.setString(1, StringUtils.upperCase(tableName)); statement.setString(1, DatabaseUtils.canonicalize(connection, tableName));
statement.setString(2, StringUtils.upperCase(columnName)); statement.setString(2, DatabaseUtils.canonicalize(connection, columnName));
// Also limit by database schema, if a schemaName has been set (only needed for PostgreSQL)
if(schemaName!=null && !schemaName.isEmpty())
{
statement.setString(3, DatabaseUtils.canonicalize(connection, schemaName));
}
try try
{ {
ResultSet results = statement.executeQuery(); ResultSet results = statement.executeQuery();
@@ -92,15 +97,24 @@ public class MigrationUtils
{ {
statement.close(); statement.close();
} }
}
// As long as we have a constraint name, drop it // As long as we have a constraint name, drop it
if (constraintName!=null && !constraintName.isEmpty()) if (constraintName!=null && !constraintName.isEmpty())
{ {
// This drop constraint SQL should be the same in all databases // Canonicalize the constraintName
String dropConstraintSQL = "ALTER TABLE " + tableName + " DROP CONSTRAINT " + constraintName; constraintName = DatabaseUtils.canonicalize(connection, constraintName);
// If constraintName starts with a $, surround with double quotes
// (This is mostly for PostgreSQL, which sometimes names constraints $1, $2, etc)
if(constraintName.startsWith("$"))
{
constraintName = "\"" + constraintName + "\"";
}
PreparedStatement statement = connection.prepareStatement(dropConstraintSQL); // This drop constraint SQL should be the same in all databases
String dropConstraintSQL = "ALTER TABLE " + DatabaseUtils.canonicalize(connection, tableName) +
" DROP CONSTRAINT " + constraintName;
statement = connection.prepareStatement(dropConstraintSQL);
try try
{ {
statement.execute(); statement.execute();

View File

@@ -13,8 +13,6 @@ import java.sql.SQLException;
import org.dspace.storage.rdbms.MigrationUtils; import org.dspace.storage.rdbms.MigrationUtils;
import org.flywaydb.core.api.migration.MigrationChecksumProvider; import org.flywaydb.core.api.migration.MigrationChecksumProvider;
import org.flywaydb.core.api.migration.jdbc.JdbcMigration; import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* This class is in support of the "V1.4__Upgrade_to_DSpace_1.4_schema.sql" * This class is in support of the "V1.4__Upgrade_to_DSpace_1.4_schema.sql"
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
implements JdbcMigration, MigrationChecksumProvider implements JdbcMigration, MigrationChecksumProvider
{ {
/** logging category */
private static final Logger log = LoggerFactory.getLogger(V1_3_9__Drop_constraint_for_DSpace_1_4_schema.class);
/* The checksum to report for this migration (when successful) */ /* The checksum to report for this migration (when successful) */
private int checksum = -1; private int checksum = -1;
@@ -57,7 +52,7 @@ public class V1_3_9__Drop_constraint_for_DSpace_1_4_schema
throws IOException, SQLException throws IOException, SQLException
{ {
// Drop the constraint associated with "name" column of "community" // Drop the constraint associated with "name" column of "community"
checksum = MigrationUtils.dropDBConstraint(connection, "community", "name", "key"); checksum = MigrationUtils.dropDBConstraint(connection, "community", "name");
} }
/** /**

View File

@@ -13,8 +13,6 @@ import java.sql.SQLException;
import org.dspace.storage.rdbms.MigrationUtils; import org.dspace.storage.rdbms.MigrationUtils;
import org.flywaydb.core.api.migration.MigrationChecksumProvider; import org.flywaydb.core.api.migration.MigrationChecksumProvider;
import org.flywaydb.core.api.migration.jdbc.JdbcMigration; import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* This class is in support of the "V1.6__Upgrade_to_DSpace_1.6_schema.sql" * This class is in support of the "V1.6__Upgrade_to_DSpace_1.6_schema.sql"
@@ -42,9 +40,6 @@ import org.slf4j.LoggerFactory;
public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
implements JdbcMigration, MigrationChecksumProvider implements JdbcMigration, MigrationChecksumProvider
{ {
/** logging category */
private static final Logger log = LoggerFactory.getLogger(V1_5_9__Drop_constraint_for_DSpace_1_6_schema.class);
/* The checksum to report for this migration (when successful) */ /* The checksum to report for this migration (when successful) */
private int checksum = -1; private int checksum = -1;
@@ -57,11 +52,11 @@ public class V1_5_9__Drop_constraint_for_DSpace_1_6_schema
throws IOException, SQLException throws IOException, SQLException
{ {
// Drop the constraint associated with "collection_id" column of "community2collection" table // Drop the constraint associated with "collection_id" column of "community2collection" table
int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id", "fkey"); int return1 = MigrationUtils.dropDBConstraint(connection, "community2collection", "collection_id");
// Drop the constraint associated with "child_comm_id" column of "community2community" table // Drop the constraint associated with "child_comm_id" column of "community2community" table
int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id", "fkey"); int return2 = MigrationUtils.dropDBConstraint(connection, "community2community", "child_comm_id");
// Drop the constraint associated with "item_id" column of "collection2item" table // Drop the constraint associated with "item_id" column of "collection2item" table
int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id", "fkey"); int return3 = MigrationUtils.dropDBConstraint(connection, "collection2item", "item_id");
// Checksum will just be the sum of those three return values // Checksum will just be the sum of those three return values
checksum = return1 + return2 + return3; checksum = return1 + return2 + return3;

View File

@@ -13,8 +13,6 @@ import java.sql.SQLException;
import org.dspace.storage.rdbms.MigrationUtils; import org.dspace.storage.rdbms.MigrationUtils;
import org.flywaydb.core.api.migration.MigrationChecksumProvider; import org.flywaydb.core.api.migration.MigrationChecksumProvider;
import org.flywaydb.core.api.migration.jdbc.JdbcMigration; import org.flywaydb.core.api.migration.jdbc.JdbcMigration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* This class is in support of the DS-1582 Metadata for All Objects feature. * This class is in support of the DS-1582 Metadata for All Objects feature.
@@ -43,9 +41,6 @@ import org.slf4j.LoggerFactory;
public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint
implements JdbcMigration, MigrationChecksumProvider implements JdbcMigration, MigrationChecksumProvider
{ {
/** logging category */
private static final Logger log = LoggerFactory.getLogger(V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.class);
/* The checksum to report for this migration (when successful) */ /* The checksum to report for this migration (when successful) */
private int checksum = -1; private int checksum = -1;
@@ -58,7 +53,7 @@ public class V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint
throws IOException, SQLException throws IOException, SQLException
{ {
// Drop the constraint associated with "item_id" column of "metadatavalue" // Drop the constraint associated with "item_id" column of "metadatavalue"
checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id", "fkey"); checksum = MigrationUtils.dropDBConstraint(connection, "metadatavalue", "item_id");
} }
/** /**

View File

@@ -0,0 +1,719 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;
import org.apache.commons.cli.*;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.CoreAdminResponse;
import org.apache.solr.client.solrj.response.FieldStatsInfo;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.FacetParams;
import org.dspace.core.ConfigurationManager;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URL;
import java.nio.file.FileStore;
import java.text.*;
import java.util.*;
/**
* Utility class to export, clear and import Solr indexes.
* @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
*/
public class SolrImportExport
{
// Date format Solr uses for timestamps, with milliseconds (e.g. 2015-07-01T12:00:00.000Z).
// NOTE(review): SimpleDateFormat is not thread-safe; these shared instances assume
// single-threaded use from this command-line tool — confirm before reusing elsewhere.
private static final DateFormat SOLR_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
// Same Solr timestamp format but without the milliseconds component.
private static final DateFormat SOLR_DATE_FORMAT_NO_MS = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
// Year-month format used to name/partition export files.
private static final DateFormat EXPORT_DATE_FORMAT = new SimpleDateFormat("yyyy-MM");
static
{
// Solr stores timestamps in UTC; exports are grouped by the local month.
// NOTE(review): SOLR_DATE_FORMAT_NO_MS is left on the default timezone — confirm intended.
SOLR_DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
EXPORT_DATE_FORMAT.setTimeZone(TimeZone.getDefault());
}
// Short command-line option names (see makeOptions() for the long names and help text).
private static final String ACTION_OPTION = "a";
private static final String CLEAR_OPTION = "c";
private static final String DIRECTORY_OPTION = "d";
private static final String HELP_OPTION = "h";
private static final String INDEX_NAME_OPTION = "i";
private static final String KEEP_OPTION = "k";
private static final String LAST_OPTION = "l";
// Maximum number of Solr documents written to a single export file.
public static final int ROWS_PER_FILE = 10_000;
private static final Logger log = Logger.getLogger(SolrImportExport.class);
/**
 * Entry point for command-line invocation. Dispatches to import, export or
 * reindex (default: export) for each index named with the -i option.
 *
 * @param args command-line arguments; see help for description
 * @throws ParseException if the command-line arguments cannot be parsed
 */
public static void main(String[] args) throws ParseException
{
    CommandLineParser parser = new PosixParser();
    Options options = makeOptions();

    try
    {
        CommandLine line = parser.parse(options, args);
        if (line.hasOption(HELP_OPTION))
        {
            printHelpAndExit(options, 0);
        }

        // At least one index name is mandatory for every action.
        if (!line.hasOption(INDEX_NAME_OPTION))
        {
            System.err.println("This command requires the index-name option but none was present.");
            printHelpAndExit(options, 1);
        }
        String[] indexNames = line.getOptionValues(INDEX_NAME_OPTION);

        String directoryName = makeDirectoryName(line.getOptionValue(DIRECTORY_OPTION));

        String action = line.getOptionValue(ACTION_OPTION, "export");
        if ("import".equals(action))
        {
            for (String indexName : indexNames)
            {
                File importDir = new File(directoryName);
                if (!importDir.exists() || !importDir.canRead())
                {
                    System.err.println("Import directory " + directoryName
                            + " doesn't exist or is not readable by the current user. Not importing index "
                            + indexName);
                    continue; // skip this index
                }
                try
                {
                    String solrUrl = makeSolrUrl(indexName);
                    boolean clear = line.hasOption(CLEAR_OPTION);
                    importIndex(indexName, importDir, solrUrl, clear, clear);
                }
                catch (IOException | SolrServerException | SolrImportExportException e)
                {
                    // Report per-index failures but keep processing the remaining indexes.
                    System.err.println("Problem encountered while trying to import index " + indexName + ".");
                    e.printStackTrace(System.err);
                }
            }
        }
        else if ("export".equals(action))
        {
            for (String indexName : indexNames)
            {
                String lastValue = line.getOptionValue(LAST_OPTION);
                File exportDir = new File(directoryName);
                if (exportDir.exists() && !exportDir.canWrite())
                {
                    System.err.println("Export directory " + directoryName
                            + " is not writable by the current user. Not exporting index "
                            + indexName);
                    continue;
                }

                if (!exportDir.exists())
                {
                    boolean created = exportDir.mkdirs();
                    if (!created)
                    {
                        System.err.println("Export directory " + directoryName
                                + " could not be created. Not exporting index " + indexName);
                        // BUGFIX: only skip this index when creating the directory failed.
                        // Previously this 'continue' sat outside the if-block, so a freshly
                        // created export directory caused the export to be skipped entirely.
                        continue;
                    }
                }

                try
                {
                    String solrUrl = makeSolrUrl(indexName);
                    String timeField = makeTimeField(indexName);
                    exportIndex(indexName, exportDir, solrUrl, timeField, lastValue);
                }
                catch (SolrServerException | IOException | SolrImportExportException e)
                {
                    System.err.println("Problem encountered while trying to export index " + indexName + ".");
                    e.printStackTrace(System.err);
                }
            }
        }
        else if ("reindex".equals(action))
        {
            for (String indexName : indexNames)
            {
                try
                {
                    boolean keepExport = line.hasOption(KEEP_OPTION);
                    reindex(indexName, directoryName, keepExport);
                }
                catch (IOException | SolrServerException | SolrImportExportException e)
                {
                    e.printStackTrace();
                }
            }
        }
        else
        {
            System.err.println("Unknown action " + action + "; must be import, export or reindex.");
            printHelpAndExit(options, 1);
        }
    }
    catch (ParseException e)
    {
        System.err.println("Cannot read command options");
        printHelpAndExit(options, 1);
    }
}
/**
 * Build the command-line option set understood by this tool.
 *
 * @return the populated commons-cli {@code Options} instance
 */
private static Options makeOptions()
{
    final Options cliOptions = new Options();
    cliOptions.addOption(ACTION_OPTION, "action", true, "The action to perform: import, export or reindex. Default: export.");
    cliOptions.addOption(CLEAR_OPTION, "clear", false, "When importing, also clear the index first. Ignored when action is export or reindex.");
    cliOptions.addOption(DIRECTORY_OPTION, "directory", true,
            "The absolute path for the directory to use for import or export. If omitted, [dspace]/solr-export is used.");
    cliOptions.addOption(HELP_OPTION, "help", false, "Get help on options for this command.");
    cliOptions.addOption(INDEX_NAME_OPTION, "index-name", true,
            "The names of the indexes to process. At least one is required. Available indexes are: authority, statistics.");
    cliOptions.addOption(KEEP_OPTION, "keep", false, "When reindexing, keep the contents of the data export directory." +
            " By default, the contents of this directory will be deleted once the reindex has finished." +
            " Ignored when action is export or import.");
    cliOptions.addOption(LAST_OPTION, "last", true, "When exporting, export records from the last [timeperiod] only." +
            " This can be one of: 'd' (beginning of yesterday through to now);" +
            " 'm' (beginning of the previous month through to end of the previous month);" +
            " a number, in which case the last [number] of days are exported, through to now (use 0 for today's data)." +
            " Date calculation is done in UTC. If omitted, all documents are exported.");
    return cliOptions;
}
/**
 * Reindexes the specified core by round-tripping every document through CSV
 * export files: a temporary core is created over a temporary data directory,
 * swapped in place of the real core so incoming documents are not lost, the
 * real core's contents are exported, cleared and re-imported, the cores are
 * swapped back, and finally any documents that arrived in the temporary core
 * during the reindex are merged back into the real core.
 *
 * @param indexName the name of the core to reindex
 * @param exportDirName the name of the directory to use for export. If this directory doesn't exist, it will be created.
 * @param keepExport whether to keep the contents of the exportDir after the reindex. If keepExport is false and the
 *                   export directory was created by this method, the export directory will be deleted at the end of the reimport.
 * @throws IOException if the export files or temporary directories cannot be created or removed.
 * @throws SolrServerException if communication with Solr fails.
 * @throws SolrImportExportException if a precondition fails (missing config directory,
 *                   unusable export directory) or an export batch cannot be written.
 */
private static void reindex(String indexName, String exportDirName, boolean keepExport)
        throws IOException, SolrServerException, SolrImportExportException {
    // The temporary core name; it receives live updates while the real core is rebuilt.
    String tempIndexName = indexName + "-temp";

    String origSolrUrl = makeSolrUrl(indexName);
    String baseSolrUrl = StringUtils.substringBeforeLast(origSolrUrl, "/"); // need to get non-core solr URL
    String tempSolrUrl = baseSolrUrl + "/" + tempIndexName;

    String solrInstanceDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" + File.separator + indexName;
    // the [dspace]/solr/[indexName]/conf directory needs to be available on the local machine for this to work
    // -- we need access to the schema.xml and solrconfig.xml file, plus files referenced from there
    // if this directory can't be found, output an error message and skip this index
    File solrInstance = new File(solrInstanceDir);
    if (!solrInstance.exists() || !solrInstance.canRead() || !solrInstance.isDirectory())
    {
        throw new SolrImportExportException("Directory " + solrInstanceDir + "/conf/ doesn't exist or isn't readable." +
                " The reindexing process requires the Solr configuration directory for this index to be present on the local machine" +
                " even if Solr is running on a different host. Not reindexing index " + indexName);
    }

    String timeField = makeTimeField(indexName);

    // Ensure the export directory exists and is writable
    File exportDir = new File(exportDirName);
    boolean createdExportDir = exportDir.mkdirs();
    if (!createdExportDir && !exportDir.exists())
    {
        throw new SolrImportExportException("Could not create export directory " + exportDirName);
    }
    if (!exportDir.canWrite())
    {
        throw new SolrImportExportException("Can't write to export directory " + exportDirName);
    }

    try
    {
        // Admin client against the non-core base URL, used for core create/swap/unload.
        HttpSolrServer adminSolr = new HttpSolrServer(baseSolrUrl);
        // try to find out size of core and compare with free space in export directory
        CoreAdminResponse status = CoreAdminRequest.getStatus(indexName, adminSolr);
        Object coreSizeObj = status.getCoreStatus(indexName).get("sizeInBytes");
        // -1 signals "size unknown"; in that case the space check below is skipped.
        long coreSize = coreSizeObj != null ? Long.valueOf(coreSizeObj.toString()) : -1;
        long usableExportSpace = exportDir.getUsableSpace();
        if (coreSize >= 0 && usableExportSpace < coreSize)
        {
            System.err.println("Not enough space in export directory " + exportDirName
                    + "; need at least as much space as the index ("
                    + FileUtils.byteCountToDisplaySize(coreSize)
                    + ") but usable space in export directory is only "
                    + FileUtils.byteCountToDisplaySize(usableExportSpace)
                    + ". Not continuing with reindex, please use the " + DIRECTORY_OPTION
                    + " option to specify an alternative export directy with sufficient space.");
            return;
        }

        // Create a temp directory to store temporary core data
        File tempDataDir = new File(ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator + "solr-data");
        boolean createdTempDataDir = tempDataDir.mkdirs();
        if (!createdTempDataDir && !tempDataDir.exists())
        {
            throw new SolrImportExportException("Could not create temporary data directory " + tempDataDir.getCanonicalPath());
        }
        if (!tempDataDir.canWrite())
        {
            throw new SolrImportExportException("Can't write to temporary data directory " + tempDataDir.getCanonicalPath());
        }

        try
        {
            // create a temporary core to hold documents coming in during the reindex
            CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create();
            createRequest.setInstanceDir(solrInstanceDir);
            createRequest.setDataDir(tempDataDir.getCanonicalPath());
            createRequest.setCoreName(tempIndexName);
            createRequest.process(adminSolr).getStatus();
        }
        catch (SolrServerException e)
        {
            // try to continue -- it may just be that the core already existed from a previous, failed attempt
            System.err.println("Caught exception when trying to create temporary core: " + e.getMessage() + "; trying to recover.");
            e.printStackTrace(System.err);
        }

        // swap actual core with temporary one
        // After this swap, the name "indexName" points at the (empty) temp data dir,
        // and "tempIndexName" points at the real data -- incoming writes keep flowing
        // to the name "indexName" and are preserved.
        CoreAdminRequest swapRequest = new CoreAdminRequest();
        swapRequest.setCoreName(indexName);
        swapRequest.setOtherCoreName(tempIndexName);
        swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
        swapRequest.process(adminSolr);

        try
        {
            // export from the actual core (from temp core name, actual data dir)
            exportIndex(indexName, exportDir, tempSolrUrl, timeField);

            // clear actual core (temp core name, clearing actual data dir) & import
            importIndex(indexName, exportDir, tempSolrUrl, true, true);
        }
        catch (Exception e)
        {
            // we ran into some problems with the export/import -- keep going to try and restore the solr cores
            // NOTE(review): the exception is only logged here; the method proceeds to swap
            // back and merge, so a partial export/import may go unnoticed by the caller.
            System.err.println("Encountered problem during reindex: " + e.getMessage() + ", will attempt to restore Solr cores");
            e.printStackTrace(System.err);
        }

        // commit changes
        HttpSolrServer origSolr = new HttpSolrServer(origSolrUrl);
        origSolr.commit();

        // swap back (statistics now going to actual core name in actual data dir)
        swapRequest = new CoreAdminRequest();
        swapRequest.setCoreName(tempIndexName);
        swapRequest.setOtherCoreName(indexName);
        swapRequest.setAction(CoreAdminParams.CoreAdminAction.SWAP);
        swapRequest.process(adminSolr);

        // export all docs from now-temp core into export directory -- this won't cause name collisions with the actual export
        // because the core name for the temporary export has -temp in it while the actual core doesn't
        exportIndex(tempIndexName, exportDir, tempSolrUrl, timeField);
        // ...and import them into the now-again-actual core *without* clearing
        importIndex(tempIndexName, exportDir, origSolrUrl, false, true);

        // commit changes
        origSolr.commit();

        // unload now-temp core (temp core name)
        CoreAdminRequest.unloadCore(tempIndexName, false, false, adminSolr);

        // clean up temporary data dir if this method created it
        if (createdTempDataDir && tempDataDir.exists())
        {
            FileUtils.deleteDirectory(tempDataDir);
        }
    }
    finally
    {
        // clean up export dir if appropriate
        if (!keepExport && createdExportDir && exportDir.exists())
        {
            FileUtils.deleteDirectory(exportDir);
        }
    }
}
/**
 * Exports all documents in the given index to the specified target directory in batches of #ROWS_PER_FILE.
 * See #makeExportFilename for the file names that are generated.
 *
 * This is a convenience overload that forwards to
 * {@link #exportIndex(String, File, String, String, String)} with a null
 * {@code fromWhen}, i.e. no date restriction: every document is exported.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
 */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField)
        throws SolrServerException, SolrImportExportException, IOException {
    // null fromWhen = export all documents, regardless of timestamp.
    exportIndex(indexName, toDir, solrUrl, timeField, null);
}
/**
 * Import previously exported documents (or externally created CSV files that have the appropriate structure) into the specified index.
 *
 * Multi-valued fields are detected up front (before any clearing) so the CSV
 * update handler can be told to split them on import.
 *
 * @param indexName the index to import.
 * @param fromDir the source directory. Must exist and be readable.
 *                The importer will look for files whose name starts with {@code indexName}
 *                and ends with .csv (to match what is generated by #makeExportFilename).
 * @param solrUrl The solr URL for the index to import to. Must not be null.
 * @param clear if true, clear the index before importing.
 * @param overwrite if true, skip _version_ field on import to disable Solr's optimistic concurrency functionality
 * @throws IOException if there is a problem reading the files or communicating with Solr.
 * @throws SolrServerException if there is a problem reading the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem communicating with Solr.
 */
public static void importIndex(final String indexName, File fromDir, String solrUrl, boolean clear, boolean overwrite)
        throws IOException, SolrServerException, SolrImportExportException
{
    if (StringUtils.isBlank(solrUrl))
    {
        // Fixed message: this is an import, not an export, and a space was missing after "index".
        throw new SolrImportExportException("Could not construct solr URL for index " + indexName + ", aborting import.");
    }

    if (!fromDir.exists() || !fromDir.canRead())
    {
        throw new SolrImportExportException("Source directory " + fromDir
                + " doesn't exist or isn't readable, aborting import of index "
                + indexName);
    }

    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    // must get multivalue fields before clearing
    List<String> multivaluedFields = getMultiValuedFields(solr);

    if (clear)
    {
        clearIndex(solrUrl);
    }

    // Only pick up the CSV batches belonging to this index.
    File[] files = fromDir.listFiles(new FilenameFilter()
    {
        @Override
        public boolean accept(File dir, String name)
        {
            return name.startsWith(indexName) && name.endsWith(".csv");
        }
    });

    if (files == null || files.length == 0)
    {
        log.warn("No export files found in directory " + fromDir.getCanonicalPath() + " for index " + indexName);
        return;
    }

    // Import batches in a deterministic (lexicographic) order.
    Arrays.sort(files);

    for (File file : files)
    {
        log.info("Importing file " + file.getCanonicalPath());
        ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
        if (overwrite)
        {
            // Skipping _version_ disables Solr's optimistic concurrency checks.
            contentStreamUpdateRequest.setParam("skip", "_version_");
        }
        for (String mvField : multivaluedFields) {
            // Tell the CSV handler to split multi-valued fields and honour backslash escapes.
            contentStreamUpdateRequest.setParam("f." + mvField + ".split", "true");
            contentStreamUpdateRequest.setParam("f." + mvField + ".escape", "\\");
        }
        contentStreamUpdateRequest.setParam("stream.contentType", "text/csv;charset=utf-8");
        contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
        contentStreamUpdateRequest.addFile(file, "text/csv;charset=utf-8");

        solr.request(contentStreamUpdateRequest);
    }

    solr.commit(true, true);
}
/**
 * Determine the names of all multi-valued fields from the data in the index.
 *
 * @param solr the solr server to query.
 * @return A list containing all multi-valued fields, or an empty list if none are found / there aren't any.
 */
private static List<String> getMultiValuedFields(HttpSolrServer solr)
{
    List<String> multiValued = new ArrayList<>();
    try
    {
        // this needs to be a non-schema request, otherwise we'll miss dynamic fields
        LukeRequest luke = new LukeRequest();
        LukeResponse lukeResponse = luke.process(solr);
        // A field is multi-valued when its schema flags contain the MULTI_VALUED abbreviation.
        String multiValuedFlag = FieldFlag.MULTI_VALUED.getAbbreviation() + "";
        for (LukeResponse.FieldInfo fieldInfo : lukeResponse.getFieldInfo().values())
        {
            if (fieldInfo.getSchema().contains(multiValuedFlag))
            {
                multiValued.add(fieldInfo.getName());
            }
        }
    }
    catch (IOException | SolrServerException e)
    {
        log.fatal("Cannot determine which fields are multi valued: " + e.getMessage(), e);
    }
    return multiValued;
}
/**
 * Remove all documents from the Solr index with the given URL, then commit and optimise the index.
 *
 * @throws IOException if there is a problem in communicating with Solr.
 * @throws SolrServerException if there is a problem in communicating with Solr.
 * @param solrUrl URL of the Solr core to clear.
 */
public static void clearIndex(String solrUrl) throws IOException, SolrServerException
{
    HttpSolrServer solr = new HttpSolrServer(solrUrl);
    // Match-all delete, then commit so the deletion is visible, then optimise
    // to reclaim the space held by the deleted documents.
    solr.deleteByQuery("*:*");
    solr.commit();
    solr.optimize();
}
/**
 * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
 * See #makeExportFilename for the file names that are generated.
 *
 * The export is organised month by month: a range facet on the time field
 * determines which months contain documents, then each month is fetched in
 * CSV format in batches of ROWS_PER_FILE rows directly via HTTP.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
 */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
        throws SolrServerException, IOException, SolrImportExportException
{
    if (StringUtils.isBlank(solrUrl))
    {
        throw new SolrImportExportException("Could not construct solr URL for index" + indexName + ", aborting export.");
    }

    if (!toDir.exists() || !toDir.canWrite())
    {
        throw new SolrImportExportException("Target directory " + toDir
                + " doesn't exist or is not writable, aborting export of index "
                + indexName);
    }

    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    SolrQuery query = new SolrQuery("*:*");
    if (StringUtils.isNotBlank(fromWhen))
    {
        // Restrict the export (and the facet counts below) to the requested time window.
        String lastValueFilter = makeFilterQuery(timeField, fromWhen);
        if (StringUtils.isNotBlank(lastValueFilter))
        {
            query.addFilterQuery(lastValueFilter);
        }
    }

    // First query: rows=0, field stats only -- used to find the earliest timestamp.
    query.setRows(0);
    query.setGetFieldStatistics(timeField);

    Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
    if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
        return;
    }
    FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
    if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
        return;
    }
    Date earliestTimestamp = (Date) timeFieldInfo.getMin();

    // Second query: a month-granularity range facet from the earliest timestamp
    // to the end of the current month, counting documents per month.
    query.setGetFieldStatistics(false);
    query.clearSorts();
    query.setRows(0);
    query.setFacet(true);
    query.add(FacetParams.FACET_RANGE, timeField);
    query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
    query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
    query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
    query.setFacetMinCount(1);

    List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();

    for (RangeFacet.Count monthFacet : monthFacets) {
        Date monthStartDate;
        String monthStart = monthFacet.getValue();
        try
        {
            monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
        }
        catch (java.text.ParseException e)
        {
            throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart, e);
        }
        int docsThisMonth = monthFacet.getCount();

        // Per-month CSV query; batched below by start offset.
        SolrQuery monthQuery = new SolrQuery("*:*");
        monthQuery.setRows(ROWS_PER_FILE);
        monthQuery.set("wt", "csv");
        monthQuery.set("fl", "*");

        // NOTE(review): the upper bound is inclusive ("]"); a document timestamped
        // exactly on a month boundary may appear in two consecutive months' exports.
        // Imports overwrite by id, so this looks harmless, but worth confirming.
        monthQuery.addFilterQuery(timeField + ":[" +monthStart + " TO " + monthStart + "+1MONTH]");

        for (int i = 0; i < docsThisMonth; i+= ROWS_PER_FILE)
        {
            monthQuery.setStart(i);
            // Fetch the CSV response directly over HTTP and stream it to the batch file.
            URL url = new URL(solrUrl + "/select?" + monthQuery.toString());

            File file = new File(toDir.getCanonicalPath(), makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
            if (file.createNewFile())
            {
                FileUtils.copyURLToFile(url, file);
                log.info("Exported batch " + i + " to " + file.getCanonicalPath());
            }
            else
            {
                throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
                        + " while exporting index " + indexName
                        + ", month" + monthStart
                        + ", batch " + i);
            }
        }
    }
}
/**
 * Return a filter query that represents the export date range passed in as lastValue.
 *
 * @param timeField the time field to use for the date range
 * @param lastValue the requested date range: "m" for the previous calendar month,
 *                  "d" for since the beginning of yesterday, or a plain number of
 *                  days to go back (0 = today only). See the command-line options
 *                  for the full description of acceptable values.
 * @return a filter query representing the date range.
 * @throws NumberFormatException if lastValue is neither "m", "d" nor a parseable integer.
 */
private static String makeFilterQuery(String timeField, String lastValue) {
    if ("m".equals(lastValue))
    {
        // export data from the previous month
        return timeField + ":[NOW/MONTH-1MONTH TO NOW/MONTH]";
    }

    int days;
    if ("d".equals(lastValue))
    {
        days = 1;
    }
    else
    {
        // other acceptable value: a number, specifying how many days back to export
        // parseInt avoids the needless boxing of Integer.valueOf for a primitive target.
        days = Integer.parseInt(lastValue); // TODO check value? (negative values produce an inverted range)
    }
    return timeField + ":[NOW/DAY-" + days + "DAYS TO " + SOLR_DATE_FORMAT.format(new Date()) + "]";
}
/**
 * Return the specified directory name or fall back to a default value.
 *
 * @param directoryValue a specific directory name. Optional.
 * @return directoryValue if given as a non-blank string. A default directory otherwise.
 */
private static String makeDirectoryName(String directoryValue)
{
    if (StringUtils.isBlank(directoryValue))
    {
        // No directory given: default to [dspace]/solr-export/.
        return ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr-export" + File.separator;
    }
    return directoryValue;
}
/**
 * Creates a filename for the export batch.
 *
 * When the export spans more than one file, a zero-padded batch number is
 * appended so the files sort lexicographically in batch order.
 *
 * @param indexName The name of the index being exported.
 * @param exportStart The start timestamp of the export
 * @param totalRecords The total number of records in the export.
 * @param index The index (start offset) of the current batch.
 * @return A file name that is appropriate to use for exporting the batch of data described by the parameters.
 */
private static String makeExportFilename(String indexName, Date exportStart, long totalRecords, int index)
{
    String exportFileNumber = "";
    if (totalRecords > ROWS_PER_FILE) {
        // Pad to the digit count of the highest batch index so all batch numbers
        // have equal width and sort correctly. The previous computation,
        // Math.ceil(Math.log10(totalRecords / ROWS_PER_FILE)), used integer
        // division inside log10 and under-counted the width when totalRecords
        // was just above a multiple of ROWS_PER_FILE, leaving the last batch
        // number wider than the rest (and lexicographically out of order).
        long maxBatchIndex = (totalRecords - 1) / ROWS_PER_FILE;
        int width = String.valueOf(maxBatchIndex).length();
        exportFileNumber = StringUtils.leftPad(String.valueOf(index / ROWS_PER_FILE), width, "0");
    }
    return indexName
            + "_export_"
            + EXPORT_DATE_FORMAT.format(exportStart)
            + (StringUtils.isNotBlank(exportFileNumber) ? "_" + exportFileNumber : "")
            + ".csv";
}
/**
 * Returns the full URL for the specified index name.
 *
 * @param indexName the index name whose Solr URL is required. If the index name starts with
 *                  &quot;statistics&quot; or is &quot;authority&quot;, the Solr base URL will be looked up
 *                  in the corresponding DSpace configuration file. Otherwise, it will fall back to a default.
 * @return the full URL to the Solr index, as a String.
 */
private static String makeSolrUrl(String indexName)
{
    String url;
    if ("authority".equals(indexName))
    {
        url = ConfigurationManager.getProperty("solr.authority.server");
    }
    else if (indexName.startsWith("statistics"))
    {
        // Append any shard suffix (e.g. "-2014") to the configured statistics base URL.
        // TODO account for year shards properly?
        url = ConfigurationManager.getProperty("solr-statistics", "server") + indexName.replaceFirst("statistics", "");
    }
    else
    {
        url = "http://localhost:8080/solr/" + indexName; // TODO better default?
    }
    return url;
}
/**
 * Returns a time field for the specified index name that is suitable for incremental export.
 *
 * @param indexName the index name whose Solr URL is required.
 * @return the name of the time field, or null if no suitable field can be determined.
 */
private static String makeTimeField(String indexName)
{
    // Statistics cores record the event timestamp in "time"; the authority
    // core tracks changes via "last_modified_date". The two cases are disjoint,
    // so the check order does not matter.
    if ("authority".equals(indexName))
    {
        return "last_modified_date";
    }
    if (indexName.startsWith("statistics"))
    {
        return "time";
    }
    return null; // TODO some sort of default?
}
/**
 * A utility method to print out all available command-line options and exit given the specified code.
 *
 * @param options the supported options.
 * @param exitCode the exit code to use. The method will call System#exit(int) with the given code.
 */
private static void printHelpAndExit(Options options, int exitCode)
{
    // Render the usage text for every registered option, then terminate the JVM.
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(SolrImportExport.class.getSimpleName() + "\n", options);
    System.exit(exitCode);
}
}

View File

@@ -0,0 +1,24 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;
/**
* @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
*/
/**
 * Checked exception signalling a failure during Solr index import, export or
 * reindexing (e.g. unusable directories, unreachable cores, unwritable batch
 * files).
 *
 * @author Andrea Schweer schweer@waikato.ac.nz for the LCoNZ Institutional Research Repositories
 */
public class SolrImportExportException extends Exception
{
    // Exception implements Serializable; declare an explicit version id so the
    // serialized form is stable across recompilation.
    private static final long serialVersionUID = 1L;

    /**
     * Create an exception with a descriptive message.
     *
     * @param message human-readable description of the failure.
     */
    public SolrImportExportException(String message)
    {
        super(message);
    }

    /**
     * Create an exception with a descriptive message and an underlying cause.
     *
     * @param message human-readable description of the failure.
     * @param cause the exception that triggered this one; preserved for stack traces.
     */
    public SolrImportExportException(String message, Throwable cause)
    {
        super(message, cause);
    }
}

View File

@@ -13,7 +13,10 @@ import org.dspace.core.Context;
import org.dspace.storage.bitstore.BitstreamStorageManager; import org.dspace.storage.bitstore.BitstreamStorageManager;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import java.util.Set; import java.util.Set;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
/** /**
* *
@@ -46,13 +49,32 @@ public abstract class AbstractVersionProvider {
for(Bundle nativeBundle : nativeItem.getBundles()) for(Bundle nativeBundle : nativeItem.getBundles())
{ {
Bundle bundleNew = itemNew.createBundle(nativeBundle.getName()); Bundle bundleNew = itemNew.createBundle(nativeBundle.getName());
// DSpace knows several types of resource policies (see the class
// org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
// and inherited. Submission, Workflow and Inherited policies will be
// set automatically as neccessary. We need to copy the custom policies
// only to preserve customly set policies and embargos (which are
// realized by custom policies with a start date).
List<ResourcePolicy> bundlePolicies =
AuthorizeManager.findPoliciesByDSOAndType(c, nativeBundle, ResourcePolicy.TYPE_CUSTOM);
AuthorizeManager.addPolicies(c, bundlePolicies, bundleNew);
for(Bitstream nativeBitstream : nativeBundle.getBitstreams()) for(Bitstream nativeBitstream : nativeBundle.getBitstreams())
{ {
Bitstream bitstreamNew = createBitstream(c, nativeBitstream); Bitstream bitstreamNew = createBitstream(c, nativeBitstream);
bundleNew.addBitstream(bitstreamNew); bundleNew.addBitstream(bitstreamNew);
// NOTE: bundle.addBitstream() causes Bundle policies to be inherited by default.
// So, we need to REMOVE any inherited TYPE_CUSTOM policies before copying over the correct ones.
AuthorizeManager.removeAllPoliciesByDSOAndType(c, bitstreamNew, ResourcePolicy.TYPE_CUSTOM);
// Now, we need to copy the TYPE_CUSTOM resource policies from old bitstream
// to the new bitstream, like we did above for bundles
List<ResourcePolicy> bitstreamPolicies =
AuthorizeManager.findPoliciesByDSOAndType(c, nativeBitstream, ResourcePolicy.TYPE_CUSTOM);
AuthorizeManager.addPolicies(c, bitstreamPolicies, bitstreamNew);
if(nativeBundle.getPrimaryBitstreamID() == nativeBitstream.getID()) if(nativeBundle.getPrimaryBitstreamID() == nativeBitstream.getID())
{ {
bundleNew.setPrimaryBitstreamID(bitstreamNew.getID()); bundleNew.setPrimaryBitstreamID(bitstreamNew.getID());

View File

@@ -17,6 +17,9 @@ import org.dspace.utils.DSpace;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
/** /**
* *
@@ -84,6 +87,15 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
} catch (IdentifierException e) { } catch (IdentifierException e) {
throw new RuntimeException("Can't create Identifier!"); throw new RuntimeException("Can't create Identifier!");
} }
// DSpace knows several types of resource policies (see the class
// org.dspace.authorize.ResourcePolicy): Submission, Workflow, Custom
// and inherited. Submission, Workflow and Inherited policies will be
// set automatically as neccessary. We need to copy the custom policies
// only to preserve customly set policies and embargos (which are
// realized by custom policies with a start date).
List<ResourcePolicy> policies =
AuthorizeManager.findPoliciesByDSOAndType(c, previousItem, ResourcePolicy.TYPE_CUSTOM);
AuthorizeManager.addPolicies(c, policies, itemNew);
itemNew.update(); itemNew.update();
return itemNew; return itemNew;
}catch (SQLException e) { }catch (SQLException e) {

View File

@@ -210,7 +210,7 @@ public class WorkflowManager
{ {
ArrayList<WorkflowItem> mylist = new ArrayList<WorkflowItem>(); ArrayList<WorkflowItem> mylist = new ArrayList<WorkflowItem>();
String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? "; String myquery = "SELECT * FROM WorkflowItem WHERE owner= ? ORDER BY workflow_id";
TableRowIterator tri = DatabaseManager.queryTable(c, TableRowIterator tri = DatabaseManager.queryTable(c,
"workflowitem", myquery,e.getID()); "workflowitem", myquery,e.getID());
@@ -246,7 +246,7 @@ public class WorkflowManager
String myquery = "SELECT workflowitem.* FROM workflowitem, TaskListItem" + String myquery = "SELECT workflowitem.* FROM workflowitem, TaskListItem" +
" WHERE tasklistitem.eperson_id= ? " + " WHERE tasklistitem.eperson_id= ? " +
" AND tasklistitem.workflow_id=workflowitem.workflow_id"; " AND tasklistitem.workflow_id=workflowitem.workflow_id ORDER BY workflowitem.workflow_id";
TableRowIterator tri = DatabaseManager TableRowIterator tri = DatabaseManager
.queryTable(c, "workflowitem", myquery, e.getID()); .queryTable(c, "workflowitem", myquery, e.getID());

View File

@@ -289,6 +289,8 @@ jsp.dspace-admin.general.eperson = EPerson
jsp.dspace-admin.general.group = Group jsp.dspace-admin.general.group = Group
jsp.dspace-admin.general.group-colon = Group: jsp.dspace-admin.general.group-colon = Group:
jsp.dspace-admin.general.next.button = Next &gt; jsp.dspace-admin.general.next.button = Next &gt;
jsp.dspace-admin.general.policy-end-date-colon = End Date:
jsp.dspace-admin.general.policy-start-date-colon = Start Date:
jsp.dspace-admin.general.remove = Remove jsp.dspace-admin.general.remove = Remove
jsp.dspace-admin.general.save = Save jsp.dspace-admin.general.save = Save
jsp.dspace-admin.general.update = Update jsp.dspace-admin.general.update = Update

View File

@@ -10,16 +10,22 @@ package org.dspace.identifier;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID; import java.util.UUID;
import org.dspace.AbstractUnitTest; import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.*; import org.dspace.content.*;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.identifier.ezid.DateToYear;
import org.dspace.identifier.ezid.Transform;
import org.dspace.kernel.ServiceManager; import org.dspace.kernel.ServiceManager;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowManager; import org.dspace.workflow.WorkflowManager;
import org.junit.*; import org.junit.*;
import static org.junit.Assert.*; import static org.junit.Assert.*;
/** /**
@@ -29,10 +35,22 @@ import static org.junit.Assert.*;
public class EZIDIdentifierProviderTest public class EZIDIdentifierProviderTest
extends AbstractUnitTest extends AbstractUnitTest
{ {
/** Name of the reserved EZID test authority */ /** Name of the reserved EZID test authority. */
private static final String TEST_SHOULDER = "10.5072/FK2"; private static final String TEST_SHOULDER = "10.5072/FK2";
private static ServiceManager sm = null; /** A sensible metadata crosswalk. */
private static final Map<String, String> aCrosswalk = new HashMap<>();
static {
aCrosswalk.put("datacite.creator", "dc.contributor.author");
aCrosswalk.put("datacite.title", "dc.title");
aCrosswalk.put("datacite.publisher", "dc.publisher");
aCrosswalk.put("datacite.publicationyear", "dc.date.issued");
}
/** A sensible set of metadata transforms. */
private static final Map<String, Transform> crosswalkTransforms = new HashMap();
static {
crosswalkTransforms.put("datacite.publicationyear", new DateToYear());
}
private static ConfigurationService config = null; private static ConfigurationService config = null;
@@ -49,6 +67,9 @@ public class EZIDIdentifierProviderTest
private static void dumpMetadata(Item eyetem) private static void dumpMetadata(Item eyetem)
{ {
if (null == eyetem)
return;
Metadatum[] metadata = eyetem.getMetadata("dc", Item.ANY, Item.ANY, Item.ANY); Metadatum[] metadata = eyetem.getMetadata("dc", Item.ANY, Item.ANY, Item.ANY);
for (Metadatum metadatum : metadata) for (Metadatum metadatum : metadata)
System.out.printf("Metadata: %s.%s.%s(%s) = %s\n", System.out.printf("Metadata: %s.%s.%s(%s) = %s\n",
@@ -89,30 +110,11 @@ public class EZIDIdentifierProviderTest
return item; return item;
} }
/*
@BeforeClass @BeforeClass
public static void setUpClass() public static void setUpClass()
throws Exception throws Exception
{ {
Context ctx = new Context();
ctx.turnOffAuthorisationSystem();
ctx.setCurrentUser(eperson);
// Create an environment for our test objects to live in.
community = Community.create(null, ctx);
community.setMetadata("name", "A Test Community");
community.update();
collection = community.createCollection();
collection.setMetadata("name", "A Test Collection");
collection.update();
ctx.complete();
// Find the usual kernel services // Find the usual kernel services
sm = kernelImpl.getServiceManager();
config = kernelImpl.getConfigurationService(); config = kernelImpl.getConfigurationService();
// Configure the service under test. // Configure the service under test.
@@ -129,71 +131,67 @@ public class EZIDIdentifierProviderTest
throws Exception throws Exception
{ {
System.out.print("Tearing down\n\n"); System.out.print("Tearing down\n\n");
Context ctx = new Context();
dumpMetadata(Item.find(ctx, itemID));
} }
@Before @Before
public void setUp() public void setUp()
throws Exception
{ {
context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
// Create an environment for our test objects to live in.
community = Community.create(null, context);
community.setMetadata("name", "A Test Community");
community.update();
collection = community.createCollection();
collection.setMetadata("name", "A Test Collection");
collection.update();
context.commit();
} }
@After @After
public void tearDown() public void tearDown()
throws SQLException
{ {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
}
*/
/** Dummy test. */ dumpMetadata(Item.find(context, itemID));
@Test
public void testNothing()
{
System.out.println("dummy");
} }
/** /**
* Test of supports method, of class DataCiteIdentifierProvider. * Test of supports method, of class DataCiteIdentifierProvider.
*/ */
/*
@Test @Test
public void testSupports_Class() public void testSupports_Class()
{ {
System.out.println("supports Class"); System.out.println("supports Class");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
Class<? extends Identifier> identifier = DOI.class; Class<? extends Identifier> identifier = DOI.class;
boolean result = instance.supports(identifier); boolean result = instance.supports(identifier);
assertTrue("DOI should be supported", result); assertTrue("DOI is supported", result);
} }
*/
/** /**
* Test of supports method, of class DataCiteIdentifierProvider. * Test of supports method, of class DataCiteIdentifierProvider.
*/ */
/*
@Test @Test
public void testSupports_String() public void testSupports_String()
{ {
System.out.println("supports String"); System.out.println("supports String");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
String identifier = "doi:" + TEST_SHOULDER; String identifier = "doi:" + TEST_SHOULDER;
boolean result = instance.supports(identifier); boolean result = instance.supports(identifier);
assertTrue(identifier + " should be supported", result); assertTrue(identifier + " is supported", result);
} }
*/
/** /**
* Test of register method, of class DataCiteIdentifierProvider. * Test of register method, of class EZIDIdentifierProvider.
*/ */
/* /*
@Test @Test
@@ -202,9 +200,7 @@ public class EZIDIdentifierProviderTest
{ {
System.out.println("register Context, DSpaceObject"); System.out.println("register Context, DSpaceObject");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
DSpaceObject dso = newItem(context); DSpaceObject dso = newItem(context);
@@ -224,9 +220,7 @@ public class EZIDIdentifierProviderTest
{ {
System.out.println("register 3"); System.out.println("register 3");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
DSpaceObject object = newItem(context); DSpaceObject object = newItem(context);
@@ -246,9 +240,7 @@ public class EZIDIdentifierProviderTest
{ {
System.out.println("reserve"); System.out.println("reserve");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
DSpaceObject dso = newItem(context); DSpaceObject dso = newItem(context);
String identifier = UUID.randomUUID().toString(); String identifier = UUID.randomUUID().toString();
@@ -266,13 +258,11 @@ public class EZIDIdentifierProviderTest
{ {
System.out.println("mint"); System.out.println("mint");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
DSpaceObject dso = newItem(context); DSpaceObject dso = newItem(context);
String result = instance.mint(context, dso); String result = instance.mint(context, dso);
assertNotNull("Null returned", result); assertNotNull("Non-null returned", result);
} }
*/ */
@@ -286,9 +276,7 @@ public class EZIDIdentifierProviderTest
{ {
System.out.println("resolve"); System.out.println("resolve");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
String identifier = UUID.randomUUID().toString(); String identifier = UUID.randomUUID().toString();
DSpaceObject expResult = newItem(context); DSpaceObject expResult = newItem(context);
@@ -310,9 +298,7 @@ public class EZIDIdentifierProviderTest
{ {
System.out.println("lookup"); System.out.println("lookup");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
String identifier = UUID.randomUUID().toString(); String identifier = UUID.randomUUID().toString();
DSpaceObject object = newItem(context); DSpaceObject object = newItem(context);
@@ -333,9 +319,7 @@ public class EZIDIdentifierProviderTest
{ {
System.out.println("delete 2"); System.out.println("delete 2");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
DSpaceObject dso = newItem(context); DSpaceObject dso = newItem(context);
@@ -370,18 +354,16 @@ public class EZIDIdentifierProviderTest
*/ */
/** /**
* Test of delete method, of class DataCiteIdentifierProvider. * Test of delete method, of class EZIDIdentifierProvider.
*/ */
/* /*
@Test() @Test
public void testDelete_3args() public void testDelete_3args()
throws Exception throws Exception
{ {
System.out.println("delete 3"); System.out.println("delete 3");
EZIDIdentifierProvider instance EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
= (EZIDIdentifierProvider)
sm.getServicesByType(EZIDIdentifierProvider.class).get(0);
DSpaceObject dso = newItem(context); DSpaceObject dso = newItem(context);
String identifier = UUID.randomUUID().toString(); String identifier = UUID.randomUUID().toString();
@@ -400,4 +382,45 @@ public class EZIDIdentifierProviderTest
assertFalse("Test identifier is still present", found.hasNext()); assertFalse("Test identifier is still present", found.hasNext());
} }
*/ */
/**
* Test of crosswalkMetadata method, of class EZIDIdentifierProvider.
* @throws Exception
*/
@Test
public void testCrosswalkMetadata()
throws Exception
{
System.out.println("crosswalkMetadata");
// Set up the instance to be tested
EZIDIdentifierProvider instance = new EZIDIdentifierProvider();
instance.setConfigurationService(config);
instance.setCrosswalk(aCrosswalk);
instance.setCrosswalkTransform(crosswalkTransforms);
// Let's have a fresh Item to work with
DSpaceObject dso = newItem(context);
String handle = dso.getHandle();
// Test!
Map<String, String> metadata = instance.crosswalkMetadata(dso);
// Evaluate
String target = (String) metadata.get("_target");
assertEquals("Generates correct _target metadatum",
config.getProperty("dspace.url") + "/handle/" + handle,
target);
assertTrue("Has title", metadata.containsKey("datacite.title"));
assertTrue("Has publication year", metadata.containsKey("datacite.publicationyear"));
assertTrue("Has publisher", metadata.containsKey("datacite.publisher"));
assertTrue("Has creator", metadata.containsKey("datacite.creator"));
// Dump out the generated metadata for inspection
System.out.println("Results:");
for (Entry metadatum : metadata.entrySet())
{
System.out.printf(" %s : %s\n", metadatum.getKey(), metadatum.getValue());
}
}
} }

View File

@@ -13,7 +13,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -225,6 +225,8 @@ public class DiscoverUtility
String query = request.getParameter("query"); String query = request.getParameter("query");
if (StringUtils.isNotBlank(query)) if (StringUtils.isNotBlank(query))
{ {
// Escape any special characters in this user-entered query
query = SearchUtils.getSearchService().escapeQueryChars(query);
queryArgs.setQuery(query); queryArgs.setQuery(query);
} }

View File

@@ -981,6 +981,8 @@ public class ItemTag extends TagSupport
. getBitstreamByName(tName); . getBitstreamByName(tName);
if (tb != null) if (tb != null)
{
if (AuthorizeManager.authorizeActionBoolean(context, tb, Constants.READ))
{ {
String myPath = request.getContextPath() String myPath = request.getContextPath()
+ "/retrieve/" + "/retrieve/"
@@ -997,6 +999,7 @@ public class ItemTag extends TagSupport
+ "\" /></a><br />"); + "\" /></a><br />");
} }
} }
}
out.print("<a class=\"btn btn-primary\" "); out.print("<a class=\"btn btn-primary\" ");
out out

View File

@@ -63,30 +63,7 @@ public class SelectCollectionTag extends TagSupport
{ {
HttpServletRequest hrq = (HttpServletRequest) pageContext.getRequest(); HttpServletRequest hrq = (HttpServletRequest) pageContext.getRequest();
Context context = UIUtil.obtainContext(hrq); Context context = UIUtil.obtainContext(hrq);
Map<Community, List<Collection>> commCollList = new LinkedHashMap<Community, List<Collection>>(); Collection[] collections = (Collection[]) hrq.getAttribute("collections");
for (Community topcommunity : Community.findAllTop(context))
{
for (Collection collection : topcommunity.getCollections())
{
List<Collection> clist = null;
if (commCollList.containsKey(topcommunity))
{
clist = commCollList.get(topcommunity);
}
else
{
clist = new ArrayList<Collection>();
}
clist.add(collection);
commCollList.put(topcommunity, clist);
}
for (Community subcommunity : topcommunity.getSubcommunities())
{
addCommCollList(subcommunity, commCollList);
}
}
sb.append("<select"); sb.append("<select");
if (name != null) if (name != null)
@@ -109,12 +86,7 @@ public class SelectCollectionTag extends TagSupport
if (collection == -1) sb.append(" selected=\"selected\""); if (collection == -1) sb.append(" selected=\"selected\"");
sb.append(">").append(firstOption).append("</option>\n"); sb.append(">").append(firstOption).append("</option>\n");
Iterator<Community> iter = commCollList.keySet().iterator(); for (Collection coll : collections)
while(iter.hasNext())
{
Community comm = iter.next();
//sb.append("<optgroup label=\"").append(getCommName(comm)).append("\">\n");
for (Collection coll : commCollList.get(comm))
{ {
sb.append("<option value=\"").append(coll.getID()).append("\""); sb.append("<option value=\"").append(coll.getID()).append("\"");
if (collection == coll.getID()) if (collection == coll.getID())
@@ -123,8 +95,7 @@ public class SelectCollectionTag extends TagSupport
} }
sb.append(">").append(CollectionDropDown.collectionPath(coll)).append("</option>\n"); sb.append(">").append(CollectionDropDown.collectionPath(coll)).append("</option>\n");
} }
//sb.append("</optgroup>\n");
}
sb.append("</select>\n"); sb.append("</select>\n");
out.print(sb.toString()); out.print(sb.toString());
@@ -141,45 +112,6 @@ public class SelectCollectionTag extends TagSupport
return SKIP_BODY; return SKIP_BODY;
} }
private void addCommCollList(Community community, Map<Community,
List<Collection>> commCollList) throws SQLException
{
for (Collection collection : community.getCollections())
{
List<Collection> clist = null;
if (commCollList.containsKey(community))
{
clist = commCollList.get(community);
}
else
{
clist = new ArrayList<Collection>();
}
clist.add(collection);
commCollList.put(community, clist);
}
for (Community subcommunity : community.getSubcommunities())
{
addCommCollList(subcommunity, commCollList);
}
}
private String getCommName(Community community) throws SQLException
{
StringBuffer sb = new StringBuffer("");
Community[] parents = community.getAllParents();
for (Community parent : parents)
{
sb.insert(0, parent.getMetadata("name")+"/");
}
sb.append(community.getMetadata("name"));
return sb.toString().substring(1);
}
public String getKlass() public String getKlass()
{ {
return klass; return klass;

View File

@@ -14,6 +14,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.app.webui.util.UIUtil; import org.dspace.app.webui.util.UIUtil;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -31,6 +32,7 @@ import org.dspace.content.Community;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogManager; import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.discovery.configuration.TagCloudConfiguration; import org.dspace.discovery.configuration.TagCloudConfiguration;
/** /**
@@ -88,6 +90,25 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
String month = request.getParameter("month"); String month = request.getParameter("month");
String year = request.getParameter("year"); String year = request.getParameter("year");
String startsWith = request.getParameter("starts_with"); String startsWith = request.getParameter("starts_with");
//validate input to avoid cross-site scripting
try {
if (StringUtils.isNotBlank(month) && !"-1".equals(month)) {
Integer.valueOf(month);
}
if (StringUtils.isNotBlank(year) && !"-1".equals(year)) {
Integer.valueOf(year);
}
if(StringUtils.isNotBlank(startsWith)) {
startsWith = Utils.addEntities(startsWith);
}
}
catch(Exception ex) {
log.warn("We were unable to parse the browse request: maybe a cross-site scripting attach?");
return null;
}
String valueFocus = request.getParameter("vfocus"); String valueFocus = request.getParameter("vfocus");
String valueFocusLang = request.getParameter("vfocus_lang"); String valueFocusLang = request.getParameter("vfocus_lang");
String authority = request.getParameter("authority"); String authority = request.getParameter("authority");
@@ -110,12 +131,14 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
// process the input, performing some inline validation // process the input, performing some inline validation
BrowseIndex bi = null; BrowseIndex bi = null;
if (type != null && !"".equals(type)) if (StringUtils.isNotEmpty(type))
{ {
bi = BrowseIndex.getBrowseIndex(type); bi = BrowseIndex.getBrowseIndex(type);
} }
if (bi == null) // don't override a requested index, if no index is set,
// try to find it on a possibly specified sort option.
if (type == null && bi == null)
{ {
if (sortBy > 0) if (sortBy > 0)
{ {
@@ -168,7 +191,7 @@ public abstract class AbstractBrowserServlet extends DSpaceServlet
} }
// if no resultsperpage set, default to 20 - if tag cloud enabled, leave it as is! // if no resultsperpage set, default to 20 - if tag cloud enabled, leave it as is!
if (resultsperpage < 0 && !bi.isTagCloudEnabled()) if (bi != null && resultsperpage < 0 && !bi.isTagCloudEnabled())
{ {
resultsperpage = 20; resultsperpage = 20;
} }

View File

@@ -65,9 +65,16 @@ public class BrowserServlet extends AbstractBrowserServlet
// all browse requests currently come to GET. // all browse requests currently come to GET.
BrowserScope scope = getBrowserScopeForRequest(context, request, response); BrowserScope scope = getBrowserScopeForRequest(context, request, response);
if (scope.getBrowseIndex() == null) if (scope == null || scope.getBrowseIndex() == null)
{ {
throw new ServletException("There is no browse index for the request"); String requestURL = request.getRequestURI();
if (request.getQueryString() != null)
{
requestURL += "?" + request.getQueryString();
}
log.warn("We were unable to parse the browse request (e.g. an unconfigured index or sort option was used). Will send a 400 Bad Request. Requested URL was: " + requestURL);
response.sendError(HttpServletResponse.SC_BAD_REQUEST);
return;
} }
// Is this a request to export the metadata, or a normal browse request? // Is this a request to export the metadata, or a normal browse request?

View File

@@ -55,7 +55,7 @@ public class DisplayStatisticsServlet extends DSpaceServlet
{ {
// is the statistics data publically viewable? // is the statistics data publically viewable?
boolean privatereport = ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin"); boolean privatereport = ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin.usage");
// is the user a member of the Administrator (1) group? // is the user a member of the Administrator (1) group?
boolean admin = Group.isMember(context, 1); boolean admin = Group.isMember(context, 1);

View File

@@ -19,6 +19,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.app.util.GoogleMetadata; import org.dspace.app.util.GoogleMetadata;
import org.dspace.app.webui.util.Authenticate; import org.dspace.app.webui.util.Authenticate;
@@ -129,8 +130,7 @@ public class HandleServlet extends DSpaceServlet
if (dso == null) if (dso == null)
{ {
log.info(LogManager log.info(LogManager.getHeader(context, "invalid_id", "path=" + path));
.getHeader(context, "invalid_id", "path=" + path));
JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1); JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
return; return;
@@ -152,7 +152,47 @@ public class HandleServlet extends DSpaceServlet
// and firing a usage event for the DSO we're reporting for // and firing a usage event for the DSO we're reporting for
return; return;
} else if ("/display-statistics.jsp".equals(extraPathInfo))
{
request.getRequestDispatcher(extraPathInfo).forward(request, response);
// If we don't return here, we keep processing and end up
// throwing a NPE when checking community authorization
// and firing a usage event for the DSO we're reporting for
return;
} else if ("/browse".equals((extraPathInfo)) || StringUtils.startsWith(extraPathInfo, "/browse?")) {
// Add the location if we got a community or collection
if (dso instanceof Community)
{
Community c = (Community) dso;
request.setAttribute("dspace.community", c);
} else if (dso instanceof Collection)
{
Collection c = (Collection) dso;
request.setAttribute("dspace.collection", c);
} }
request.getRequestDispatcher(extraPathInfo).forward(request, response);
// If we don't return here, we keep processing and end up
// throwing a NPE when checking community authorization
// and firing a usage event for the DSO we're reporting for
return;
} else if ("/simple-search".equals(extraPathInfo) || StringUtils.startsWith(extraPathInfo, "simple-search?")) {
// Add the location if we got a community or collection
if (dso instanceof Community)
{
Community c = (Community) dso;
request.setAttribute("dspace.community", c);
} else if (dso instanceof Collection)
{
Collection c = (Collection) dso;
request.setAttribute("dspace.collection", c);
}
request.getRequestDispatcher(extraPathInfo).forward(request, response);
// If we don't return here, we keep processing and end up
// throwing a NPE when checking community authorization
// and firing a usage event for the DSO we're reporting for
return;
}
// OK, we have a valid Handle. What is it? // OK, we have a valid Handle. What is it?
if (dso.getType() == Constants.ITEM) if (dso.getType() == Constants.ITEM)
@@ -195,9 +235,9 @@ public class HandleServlet extends DSpaceServlet
} }
else else
{ {
// Forward to another servlet log.debug("Found Item with extraPathInfo => Error.");
request.getRequestDispatcher(extraPathInfo).forward(request, JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
response); return;
} }
} }
@@ -231,9 +271,9 @@ public class HandleServlet extends DSpaceServlet
} }
else else
{ {
// Forward to another servlet log.debug("Found Collection with extraPathInfo => Error.");
request.getRequestDispatcher(extraPathInfo).forward(request, JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
response); return;
} }
} }
else if (dso.getType() == Constants.COMMUNITY) else if (dso.getType() == Constants.COMMUNITY)
@@ -255,9 +295,9 @@ public class HandleServlet extends DSpaceServlet
} }
else else
{ {
// Forward to another servlet log.debug("Found Community with extraPathInfo => Error.");
request.getRequestDispatcher(extraPathInfo).forward(request, JSPManager.showInvalidIDError(request, response, StringEscapeUtils.escapeHtml(path), -1);
response); return;
} }
} }
else else

View File

@@ -9,6 +9,7 @@ package org.dspace.app.webui.servlet.admin;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -16,6 +17,7 @@ import java.util.Map;
import javax.servlet.ServletException; import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.time.DateUtils;
import org.dspace.app.util.AuthorizeUtil; import org.dspace.app.util.AuthorizeUtil;
import org.dspace.app.webui.servlet.DSpaceServlet; import org.dspace.app.webui.servlet.DSpaceServlet;
@@ -493,6 +495,20 @@ public class AuthorizeAdminServlet extends DSpaceServlet
.getIntParameter(request, "collection_id"); .getIntParameter(request, "collection_id");
int communityId = UIUtil.getIntParameter(request, "community_id"); int communityId = UIUtil.getIntParameter(request, "community_id");
int itemId = UIUtil.getIntParameter(request, "item_id"); int itemId = UIUtil.getIntParameter(request, "item_id");
Date startDate = null;
try {
startDate = DateUtils.parseDate(request.getParameter("policy_start_date"),
new String[]{"yyyy-MM-dd", "yyyy-MM", "yyyy"});
} catch (Exception ex) {
//Ignore start date is already null
}
Date endDate = null;
try {
endDate = DateUtils.parseDate(request.getParameter("policy_end_date"),
new String[]{"yyyy-MM-dd", "yyyy-MM", "yyyy"});
} catch (Exception ex) {
//Ignore end date is already null
}
Item item = null; Item item = null;
Collection collection = null; Collection collection = null;
@@ -574,6 +590,11 @@ public class AuthorizeAdminServlet extends DSpaceServlet
// modify the policy // modify the policy
policy.setAction(actionId); policy.setAction(actionId);
policy.setGroup(group); policy.setGroup(group);
// start and end dates are used for Items and Bitstreams only.
// Set start and end date even if they are null to be able to
// delete previously set dates.
policy.setStartDate(startDate);
policy.setEndDate(endDate);
policy.update(); policy.update();
// show edit form! // show edit form!

View File

@@ -38,6 +38,7 @@
<%@ page import="org.dspace.content.*"%> <%@ page import="org.dspace.content.*"%>
<%@ page import="org.dspace.core.ConfigurationManager"%> <%@ page import="org.dspace.core.ConfigurationManager"%>
<%@ page import="org.dspace.core.Context" %> <%@ page import="org.dspace.core.Context" %>
<%@ page import="org.dspace.core.Utils" %>
<%@ page import="org.dspace.eperson.Group" %> <%@ page import="org.dspace.eperson.Group" %>
<%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %> <%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %>
<%@ page import="java.net.URLEncoder" %> <%@ page import="java.net.URLEncoder" %>
@@ -377,7 +378,7 @@
{ {
if (dcv.length > 0) if (dcv.length > 0)
{ {
displayTitle = dcv[0].value; displayTitle = Utils.addEntities(dcv[0].value);
} }
} }
%><p class="recentItem"><a href="<%= request.getContextPath() %>/handle/<%= items[i].getHandle() %>"><%= displayTitle %></a></p><% %><p class="recentItem"><a href="<%= request.getContextPath() %>/handle/<%= items[i].getHandle() %>"><%= displayTitle %></a></p><%

View File

@@ -32,6 +32,7 @@
<%@ page import="org.dspace.browse.ItemCounter" %> <%@ page import="org.dspace.browse.ItemCounter" %>
<%@ page import="org.dspace.content.*" %> <%@ page import="org.dspace.content.*" %>
<%@ page import="org.dspace.core.ConfigurationManager" %> <%@ page import="org.dspace.core.ConfigurationManager" %>
<%@ page import="org.dspace.core.Utils" %>
<%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %> <%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %>
<% <%
@@ -157,7 +158,7 @@
{ {
if (dcv.length > 0) if (dcv.length > 0)
{ {
displayTitle = dcv[0].value; displayTitle = Utils.addEntities(dcv[0].value);
} }
} }
%> %>

View File

@@ -14,6 +14,7 @@
The add-on may be turn off in dspace.cfg The add-on may be turn off in dspace.cfg
--%> --%>
<%@page import="org.dspace.core.Utils"%>
<%@ page contentType="text/html;charset=UTF-8" %> <%@ page contentType="text/html;charset=UTF-8" %>
<%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %> <%@ page import="javax.servlet.jsp.jstl.fmt.LocaleSupport" %>
<%@ taglib uri="/WEB-INF/dspace-tags.tld" prefix="dspace" %> <%@ taglib uri="/WEB-INF/dspace-tags.tld" prefix="dspace" %>
@@ -52,7 +53,7 @@
<form name="filterVocabulary" method="post" action="<%= request.getContextPath() %>/subject-search"> <form name="filterVocabulary" method="post" action="<%= request.getContextPath() %>/subject-search">
<input style="border-width:1px;border-style:solid;" <input style="border-width:1px;border-style:solid;"
name="filter" type="text" id="filter" name="filter" type="text" id="filter"
size="15" value="<%= filter %>" size="15" value="<%= Utils.addEntities(filter) %>"
title="<%= LocaleSupport.getLocalizedMessage(pageContext, "jsp.controlledvocabulary.search.trimmessage") %>"/> title="<%= LocaleSupport.getLocalizedMessage(pageContext, "jsp.controlledvocabulary.search.trimmessage") %>"/>
<input type="submit" name="submit" value="<%= LocaleSupport.getLocalizedMessage(pageContext, "jsp.controlledvocabulary.search.trimbutton") %>"/> <input type="submit" name="submit" value="<%= LocaleSupport.getLocalizedMessage(pageContext, "jsp.controlledvocabulary.search.trimbutton") %>"/>
<input type="hidden" name="action" value="filter"/> <input type="hidden" name="action" value="filter"/>

View File

@@ -8,6 +8,7 @@
--%> --%>
<%@page import="org.apache.commons.lang.time.DateFormatUtils"%>
<%-- <%--
- policy editor - for new or existing policies - policy editor - for new or existing policies
- -
@@ -25,8 +26,8 @@
- "id_name" - name/value passed in from id_name/id above - "id_name" - name/value passed in from id_name/id above
- group_id - set if user selected a group - group_id - set if user selected a group
- eperson_id - set if user selected an eperson - eperson_id - set if user selected an eperson
- start_date - not set, unused - start_date - start date of a policy (e.g. for embargo feature)
- end_date - not set, unused - end_date - end date of a policy
- action_id - set to whatever user chose - action_id - set to whatever user chose
- (new policy) - set to a the string passed in above if policy is a new one - (new policy) - set to a the string passed in above if policy is a new one
--%> --%>
@@ -118,6 +119,28 @@
} %> } %>
</select> </select>
</span> </span>
<%
// start and end dates are used for Items and Bitstreams only.
if (resourceType == Constants.ITEM || resourceType == Constants.BITSTREAM)
{
%>
<!-- policy start date -->
<span class="col-md-2">
<label for="t_start_date_id"><fmt:message key="jsp.dspace-admin.general.policy-start-date-colon"/></label>
</span>
<span class="col-md-10">
<input class="form-control" name="policy_start_date" maxlength="10" size="10" type="text"
value="<%= policy.getStartDate() != null ? DateFormatUtils.format(policy.getStartDate(), "yyyy-MM-dd") : "" %>" />
</span>
<!-- policy end date -->
<span class="col-md-2">
<label for="t_end_date_id"><fmt:message key="jsp.dspace-admin.general.policy-end-date-colon"/></label>
</span>
<span class="col-md-10">
<input class="form-control" name="policy_end_date" maxlength="10" size="10" type="text"
value="<%= policy.getEndDate() != null ? DateFormatUtils.format(policy.getEndDate(), "yyyy-MM-dd") : "" %>" />
</span>
<%} // if Item||Bitstream%>
</div> </div>
<% if( newpolicy != null ) { %> <input name="newpolicy" type="hidden" value="<%=newpolicy%>"/> <% } %> <% if( newpolicy != null ) { %> <input name="newpolicy" type="hidden" value="<%=newpolicy%>"/> <% } %>

View File

@@ -15,6 +15,7 @@
- recent.submissions - RecetSubmissions - recent.submissions - RecetSubmissions
--%> --%>
<%@page import="org.dspace.core.Utils"%>
<%@page import="org.dspace.content.Bitstream"%> <%@page import="org.dspace.content.Bitstream"%>
<%@ page contentType="text/html;charset=UTF-8" %> <%@ page contentType="text/html;charset=UTF-8" %>
@@ -113,13 +114,13 @@ if (submissions != null && submissions.count() > 0)
String displayTitle = "Untitled"; String displayTitle = "Untitled";
if (dcv != null & dcv.length > 0) if (dcv != null & dcv.length > 0)
{ {
displayTitle = dcv[0].value; displayTitle = Utils.addEntities(dcv[0].value);
} }
dcv = item.getMetadata("dc", "description", "abstract", Item.ANY); dcv = item.getMetadata("dc", "description", "abstract", Item.ANY);
String displayAbstract = ""; String displayAbstract = "";
if (dcv != null & dcv.length > 0) if (dcv != null & dcv.length > 0)
{ {
displayAbstract = dcv[0].value; displayAbstract = Utils.addEntities(dcv[0].value);
} }
%> %>
<div style="padding-bottom: 50px; min-height: 200px;" class="item <%= first?"active":""%>"> <div style="padding-bottom: 50px; min-height: 200px;" class="item <%= first?"active":""%>">

View File

@@ -106,7 +106,7 @@
{ {
String key = "jsp.search.advanced.type." + index; String key = "jsp.search.advanced.type." + index;
%> %>
<option value="<%= index %>" <%= field1.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option> <option value="<%= StringEscapeUtils.escapeHtml(index) %>" <%= field1.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
<% <%
} }
%> %>
@@ -136,7 +136,7 @@
{ {
String key = "jsp.search.advanced.type." + index; String key = "jsp.search.advanced.type." + index;
%> %>
<option value="<%= index %>" <%= field2.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option> <option value="<%= StringEscapeUtils.escapeHtml(index) %>" <%= field2.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
<% <%
} }
%> %>
@@ -162,7 +162,7 @@
{ {
String key = "jsp.search.advanced.type." + index; String key = "jsp.search.advanced.type." + index;
%> %>
<option value="<%= index %>" <%= field3.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option> <option value="<%= StringEscapeUtils.escapeHtml(index) %>" <%= field3.equals(index) ? "selected=\"selected\"" : "" %>><fmt:message key="<%= key %>"/></option>
<% <%
} }
%> %>

View File

@@ -33,6 +33,7 @@
- admin_button - If the user is an admin - admin_button - If the user is an admin
--%> --%>
<%@page import="org.dspace.core.Utils"%>
<%@page import="org.dspace.discovery.configuration.DiscoverySearchFilterFacet"%> <%@page import="org.dspace.discovery.configuration.DiscoverySearchFilterFacet"%>
<%@page import="org.dspace.app.webui.util.UIUtil"%> <%@page import="org.dspace.app.webui.util.UIUtil"%>
<%@page import="java.util.HashMap"%> <%@page import="java.util.HashMap"%>
@@ -55,7 +56,6 @@
prefix="c" %> prefix="c" %>
<%@ taglib uri="http://www.dspace.org/dspace-tags.tld" prefix="dspace" %> <%@ taglib uri="http://www.dspace.org/dspace-tags.tld" prefix="dspace" %>
<%@ page import="org.apache.commons.lang.StringEscapeUtils" %>
<%@ page import="java.net.URLEncoder" %> <%@ page import="java.net.URLEncoder" %>
<%@ page import="org.dspace.content.Community" %> <%@ page import="org.dspace.content.Community" %>
<%@ page import="org.dspace.content.Collection" %> <%@ page import="org.dspace.content.Collection" %>
@@ -192,10 +192,10 @@
} }
%> </select><br/> %> </select><br/>
<label for="query"><fmt:message key="jsp.search.results.searchfor"/></label> <label for="query"><fmt:message key="jsp.search.results.searchfor"/></label>
<input type="text" size="50" id="query" name="query" value="<%= (query==null ? "" : StringEscapeUtils.escapeHtml(query)) %>"/> <input type="text" size="50" id="query" name="query" value="<%= (query==null ? "" : Utils.addEntities(query)) %>"/>
<input type="submit" id="main-query-submit" class="btn btn-primary" value="<fmt:message key="jsp.general.go"/>" /> <input type="submit" id="main-query-submit" class="btn btn-primary" value="<fmt:message key="jsp.general.go"/>" />
<% if (StringUtils.isNotBlank(spellCheckQuery)) {%> <% if (StringUtils.isNotBlank(spellCheckQuery)) {%>
<p class="lead"><fmt:message key="jsp.search.didyoumean"><fmt:param><a id="spellCheckQuery" data-spell="<%= StringEscapeUtils.escapeHtml(spellCheckQuery) %>" href="#"><%= spellCheckQuery %></a></fmt:param></fmt:message></p> <p class="lead"><fmt:message key="jsp.search.didyoumean"><fmt:param><a id="spellCheckQuery" data-spell="<%= Utils.addEntities(spellCheckQuery) %>" href="#"><%= spellCheckQuery %></a></fmt:param></fmt:message></p>
<% } %> <% } %>
<input type="hidden" value="<%= rpp %>" name="rpp" /> <input type="hidden" value="<%= rpp %>" name="rpp" />
<input type="hidden" value="<%= sortedBy %>" name="sort_by" /> <input type="hidden" value="<%= sortedBy %>" name="sort_by" />
@@ -214,7 +214,7 @@
for (DiscoverySearchFilter searchFilter : availableFilters) for (DiscoverySearchFilter searchFilter : availableFilters)
{ {
String fkey = "jsp.search.filter."+searchFilter.getIndexFieldName(); String fkey = "jsp.search.filter."+searchFilter.getIndexFieldName();
%><option value="<%= searchFilter.getIndexFieldName() %>"<% %><option value="<%= Utils.addEntities(searchFilter.getIndexFieldName()) %>"<%
if (filter[0].equals(searchFilter.getIndexFieldName())) if (filter[0].equals(searchFilter.getIndexFieldName()))
{ {
%> selected="selected"<% %> selected="selected"<%
@@ -225,7 +225,7 @@
if (!found) if (!found)
{ {
String fkey = "jsp.search.filter."+filter[0]; String fkey = "jsp.search.filter."+filter[0];
%><option value="<%= filter[0] %>" selected="selected"><fmt:message key="<%= fkey %>"/></option><% %><option value="<%= Utils.addEntities(filter[0]) %>" selected="selected"><fmt:message key="<%= fkey %>"/></option><%
} }
%> %>
</select> </select>
@@ -234,11 +234,11 @@
for (String opt : options) for (String opt : options)
{ {
String fkey = "jsp.search.filter.op."+opt; String fkey = "jsp.search.filter.op."+opt;
%><option value="<%= opt %>"<%= opt.equals(filter[1])?" selected=\"selected\"":"" %>><fmt:message key="<%= fkey %>"/></option><% %><option value="<%= Utils.addEntities(opt) %>"<%= opt.equals(filter[1])?" selected=\"selected\"":"" %>><fmt:message key="<%= fkey %>"/></option><%
} }
%> %>
</select> </select>
<input type="text" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= StringEscapeUtils.escapeHtml(filter[2]) %>" size="45"/> <input type="text" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= Utils.addEntities(filter[2]) %>" size="45"/>
<input class="btn btn-default" type="submit" id="submit_filter_remove_<%=idx %>" name="submit_filter_remove_<%=idx %>" value="X" /> <input class="btn btn-default" type="submit" id="submit_filter_remove_<%=idx %>" name="submit_filter_remove_<%=idx %>" value="X" />
<br/> <br/>
<% <%
@@ -255,17 +255,17 @@
<h5><fmt:message key="jsp.search.filter.heading" /></h5> <h5><fmt:message key="jsp.search.filter.heading" /></h5>
<p class="discovery-search-filters-hint"><fmt:message key="jsp.search.filter.hint" /></p> <p class="discovery-search-filters-hint"><fmt:message key="jsp.search.filter.hint" /></p>
<form action="simple-search" method="get"> <form action="simple-search" method="get">
<input type="hidden" value="<%= StringEscapeUtils.escapeHtml(searchScope) %>" name="location" /> <input type="hidden" value="<%= Utils.addEntities(searchScope) %>" name="location" />
<input type="hidden" value="<%= StringEscapeUtils.escapeHtml(query) %>" name="query" /> <input type="hidden" value="<%= Utils.addEntities(query) %>" name="query" />
<% if (appliedFilterQueries.size() > 0 ) { <% if (appliedFilterQueries.size() > 0 ) {
int idx = 1; int idx = 1;
for (String[] filter : appliedFilters) for (String[] filter : appliedFilters)
{ {
boolean found = false; boolean found = false;
%> %>
<input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= filter[0] %>" /> <input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= Utils.addEntities(filter[0]) %>" />
<input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= filter[1] %>" /> <input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= Utils.addEntities(filter[1]) %>" />
<input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= StringEscapeUtils.escapeHtml(filter[2]) %>" /> <input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= Utils.addEntities(filter[2]) %>" />
<% <%
idx++; idx++;
} }
@@ -299,17 +299,17 @@
<%-- Include a component for modifying sort by, order, results per page, and et-al limit --%> <%-- Include a component for modifying sort by, order, results per page, and et-al limit --%>
<div class="discovery-pagination-controls panel-footer"> <div class="discovery-pagination-controls panel-footer">
<form action="simple-search" method="get"> <form action="simple-search" method="get">
<input type="hidden" value="<%= StringEscapeUtils.escapeHtml(searchScope) %>" name="location" /> <input type="hidden" value="<%= Utils.addEntities(searchScope) %>" name="location" />
<input type="hidden" value="<%= StringEscapeUtils.escapeHtml(query) %>" name="query" /> <input type="hidden" value="<%= Utils.addEntities(query) %>" name="query" />
<% if (appliedFilterQueries.size() > 0 ) { <% if (appliedFilterQueries.size() > 0 ) {
int idx = 1; int idx = 1;
for (String[] filter : appliedFilters) for (String[] filter : appliedFilters)
{ {
boolean found = false; boolean found = false;
%> %>
<input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= filter[0] %>" /> <input type="hidden" id="filter_field_<%=idx %>" name="filter_field_<%=idx %>" value="<%= Utils.addEntities(filter[0]) %>" />
<input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= filter[1] %>" /> <input type="hidden" id="filter_type_<%=idx %>" name="filter_type_<%=idx %>" value="<%= Utils.addEntities(filter[1]) %>" />
<input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= StringEscapeUtils.escapeHtml(filter[2]) %>" /> <input type="hidden" id="filter_value_<%=idx %>" name="filter_value_<%=idx %>" value="<%= Utils.addEntities(filter[2]) %>" />
<% <%
idx++; idx++;
} }
@@ -486,7 +486,7 @@ else if( qResults != null)
if (pageFirst != 1) if (pageFirst != 1)
{ {
%><li><a href="<%= firstURL %>">1</a></li><li>...</li><% %><li><a href="<%= firstURL %>">1</a></li><li class="disabled"><span>...</span></li><%
} }
for( long q = pageFirst; q <= pageLast; q++ ) for( long q = pageFirst; q <= pageLast; q++ )
@@ -576,7 +576,7 @@ else
if (pageFirst != 1) if (pageFirst != 1)
{ {
%><li><a href="<%= firstURL %>">1</a></li><li class="disabled"><span>...<span></li><% %><li><a href="<%= firstURL %>">1</a></li><li class="disabled"><span>...</span></li><%
} }
for( long q = pageFirst; q <= pageLast; q++ ) for( long q = pageFirst; q <= pageLast; q++ )
@@ -631,6 +631,7 @@ else
for (DiscoverySearchFilterFacet facetConf : facetsConf) for (DiscoverySearchFilterFacet facetConf : facetsConf)
{ {
if(qResults!=null) {
String f = facetConf.getIndexFieldName(); String f = facetConf.getIndexFieldName();
List<FacetResult> facet = qResults.getFacetResult(f); List<FacetResult> facet = qResults.getFacetResult(f);
if (facet.size() == 0) if (facet.size() == 0)
@@ -654,6 +655,7 @@ else
showFacets.put(f, showFacet); showFacets.put(f, showFacet);
brefine = brefine || showFacet; brefine = brefine || showFacet;
} }
}
if (brefine) { if (brefine) {
%> %>
@@ -747,4 +749,3 @@ else
<% } %> <% } %>
</dspace:sidebar> </dspace:sidebar>
</dspace:layout> </dspace:layout>

View File

@@ -396,7 +396,7 @@ if (pageTotal > pageCurrent)
</p> </p>
<form id="dso-display" action="<%=request.getContextPath()%>/dso-display" method="post"> <form id="dso-display" action="<%=request.getContextPath()%>/dso-display" method="post">
<input type="hidden" name="query" value="<%=query%>"/> <input type="hidden" name="query" value="<%=StringEscapeUtils.escapeHtml(query)%>"/>
<input type="hidden" name="rpp" value="<%=rpp%>"/> <input type="hidden" name="rpp" value="<%=rpp%>"/>
<input type="hidden" name="page" value="<%=pageCurrent%>"/> <input type="hidden" name="page" value="<%=pageCurrent%>"/>
<input type="hidden" name="sort_by" value="<%=(so != null ? so.getNumber() : 0)%>"/> <input type="hidden" name="sort_by" value="<%=(so != null ? so.getNumber() : 0)%>"/>

View File

@@ -9,7 +9,7 @@
HTML5 Shiv v3.6.2pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed HTML5 Shiv v3.6.2pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed
*/ */
(function(l,f){function m(){var a=e.elements;return"string"==typeof a?a.split(" "):a}function i(a){var b=n[a[o]];b||(b={},h++,a[o]=h,n[h]=b);return b}function p(a,b,c){b||(b=f);if(g)return b.createElement(a);c||(c=i(b));b=c.cache[a]?c.cache[a].cloneNode():r.test(a)?(c.cache[a]=c.createElem(a)).cloneNode():c.createElem(a);return b.canHaveChildren&&!s.test(a)?c.frag.appendChild(b):b}function t(a,b){if(!b.cache)b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag(); (function(l,f){function m(){var a=e.elements;return"string"==typeof a?a.split(" "):a}function i(a){var b=n[a[o]];b||(b={},h++,a[o]=h,n[h]=b);return b}function p(a,b,c){b||(b=f);if(g)return b.createElement(a);c||(c=i(b));b=c.cache[a]?c.cache[a].cloneNode():r.test(a)?(c.cache[a]=c.createElem(a)).cloneNode():c.createElem(a);return b.canHaveChildren&&!s.test(a)?c.frag.appendChild(b):b}function t(a,b){if(!b.cache)b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag();
a.createElement=function(c){return!e.shivMethods?b.createElem(c):p(c,a,b)};a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+m().join().replace(/\w+/g,function(a){b.createElem(a);b.frag.createElement(a);return'c("'+a+'")'})+");return n}")(e,b.frag)}function q(a){a||(a=f);var b=i(a);if(e.shivCSS&&!j&&!b.hasCSS){var c,d=a;c=d.createElement("p");d=d.getElementsByTagName("head")[0]||d.documentElement;c.innerHTML="x<style>article,aside,figcaption,figure,footer,header,hgroup,nav,section{display:block}mark{background:#FF0;color:#000}</style>"; a.createElement=function(c){return!e.shivMethods?b.createElem(c):p(c,a,b)};a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+m().join().replace(/\w+/g,function(a){b.createElem(a);b.frag.createElement(a);return'c("'+a+'")'})+");return n}")(e,b.frag)}function q(a){a||(a=f);var b=i(a);if(e.shivCSS&&!j&&!b.hasCSS){var c,d=a;c=d.createElement("p");d=d.getElementsByTagName("head")[0]||d.documentElement;c.innerHTML="x<style>article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}</style>";
c=d.insertBefore(c.lastChild,d.firstChild);b.hasCSS=!!c}g||t(a,b);return a}var k=l.html5||{},s=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,r=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,j,o="_html5shiv",h=0,n={},g;(function(){try{var a=f.createElement("a");a.innerHTML="<xyz></xyz>";j="hidden"in a;var b;if(!(b=1==a.childNodes.length)){f.createElement("a");var c=f.createDocumentFragment();b="undefined"==typeof c.cloneNode|| c=d.insertBefore(c.lastChild,d.firstChild);b.hasCSS=!!c}g||t(a,b);return a}var k=l.html5||{},s=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,r=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,j,o="_html5shiv",h=0,n={},g;(function(){try{var a=f.createElement("a");a.innerHTML="<xyz></xyz>";j="hidden"in a;var b;if(!(b=1==a.childNodes.length)){f.createElement("a");var c=f.createDocumentFragment();b="undefined"==typeof c.cloneNode||
"undefined"==typeof c.createDocumentFragment||"undefined"==typeof c.createElement}g=b}catch(d){g=j=!0}})();var e={elements:k.elements||"abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video",version:"3.6.2pre",shivCSS:!1!==k.shivCSS,supportsUnknownElements:g,shivMethods:!1!==k.shivMethods,type:"default",shivDocument:q,createElement:p,createDocumentFragment:function(a,b){a||(a=f);if(g)return a.createDocumentFragment(); "undefined"==typeof c.createDocumentFragment||"undefined"==typeof c.createElement}g=b}catch(d){g=j=!0}})();var e={elements:k.elements||"abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup main mark meter nav output progress section summary time video",version:"3.6.2pre",shivCSS:!1!==k.shivCSS,supportsUnknownElements:g,shivMethods:!1!==k.shivMethods,type:"default",shivDocument:q,createElement:p,createDocumentFragment:function(a,b){a||(a=f);if(g)return a.createDocumentFragment();
for(var b=b||i(a),c=b.frag.cloneNode(),d=0,e=m(),h=e.length;d<h;d++)c.createElement(e[d]);return c}};l.html5=e;q(f)})(this,document); for(var b=b||i(a),c=b.frag.cloneNode(),d=0,e=m(),h=e.length;d<h;d++)c.createElement(e[d]);return c}};l.html5=e;q(f)})(this,document);

View File

@@ -11,7 +11,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>../..</relativePath> <relativePath>../..</relativePath>
</parent> </parent>

View File

@@ -11,7 +11,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -8,7 +8,7 @@
<parent> <parent>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -128,7 +128,7 @@ public class XOAI {
if (clean) { if (clean) {
clearIndex(); clearIndex();
System.out.println("Using full import."); System.out.println("Using full import.");
this.indexAll(); result = this.indexAll();
} else { } else {
SolrQuery solrParams = new SolrQuery("*:*") SolrQuery solrParams = new SolrQuery("*:*")
.addField("item.lastmodified") .addField("item.lastmodified")
@@ -167,10 +167,11 @@ public class XOAI {
System.out System.out
.println("Incremental import. Searching for documents modified after: " .println("Incremental import. Searching for documents modified after: "
+ last.toString()); + last.toString());
// Index both in_archive items AND withdrawn items. Withdrawn items will be flagged withdrawn
String sqlQuery = "SELECT item_id FROM item WHERE in_archive=TRUE AND discoverable=TRUE AND last_modified > ?"; // (in order to notify external OAI harvesters of their new status)
String sqlQuery = "SELECT item_id FROM item WHERE (in_archive=TRUE OR withdrawn=TRUE) AND discoverable=TRUE AND last_modified > ?";
if(DatabaseManager.isOracle()){ if(DatabaseManager.isOracle()){
sqlQuery = "SELECT item_id FROM item WHERE in_archive=1 AND discoverable=1 AND last_modified > ?"; sqlQuery = "SELECT item_id FROM item WHERE (in_archive=1 OR withdrawn=1) AND discoverable=1 AND last_modified > ?";
} }
try { try {
@@ -187,10 +188,11 @@ public class XOAI {
private int indexAll() throws DSpaceSolrIndexerException { private int indexAll() throws DSpaceSolrIndexerException {
System.out.println("Full import"); System.out.println("Full import");
try { try {
// Index both in_archive items AND withdrawn items. Withdrawn items will be flagged withdrawn
String sqlQuery = "SELECT item_id FROM item WHERE in_archive=TRUE AND discoverable=TRUE"; // (in order to notify external OAI harvesters of their new status)
String sqlQuery = "SELECT item_id FROM item WHERE (in_archive=TRUE OR withdrawn=TRUE) AND discoverable=TRUE";
if(DatabaseManager.isOracle()){ if(DatabaseManager.isOracle()){
sqlQuery = "SELECT item_id FROM item WHERE in_archive=1 AND discoverable=1"; sqlQuery = "SELECT item_id FROM item WHERE (in_archive=1 OR withdrawn=1) AND discoverable=1";
} }
TableRowIterator iterator = DatabaseManager.query(context, TableRowIterator iterator = DatabaseManager.query(context,
@@ -244,7 +246,9 @@ public class XOAI {
String handle = item.getHandle(); String handle = item.getHandle();
doc.addField("item.handle", handle); doc.addField("item.handle", handle);
doc.addField("item.lastmodified", item.getLastModified()); doc.addField("item.lastmodified", item.getLastModified());
if (item.getSubmitter() != null) {
doc.addField("item.submitter", item.getSubmitter().getEmail()); doc.addField("item.submitter", item.getSubmitter().getEmail());
}
doc.addField("item.deleted", item.isWithdrawn() ? "true" : "false"); doc.addField("item.deleted", item.isWithdrawn() ? "true" : "false");
for (Collection col : item.getCollections()) for (Collection col : item.getCollections())
doc.addField("item.collections", doc.addField("item.collections",
@@ -287,17 +291,14 @@ public class XOAI {
} }
private boolean isPublic(Item item) { private boolean isPublic(Item item) {
boolean pub = false;
try { try {
AuthorizeManager.authorizeAction(context, item, Constants.READ); //Check if READ access allowed on this Item
for (Bundle b : item.getBundles()) pub = AuthorizeManager.authorizeActionBoolean(context, item, Constants.READ);
AuthorizeManager.authorizeAction(context, b, Constants.READ);
return true;
} catch (AuthorizeException ex) {
log.debug(ex.getMessage());
} catch (SQLException ex) { } catch (SQLException ex) {
log.error(ex.getMessage()); log.error(ex.getMessage());
} }
return false; return pub;
} }
@@ -355,6 +356,8 @@ public class XOAI {
XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class); XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class);
XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class); XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class);
Context ctx = null;
try { try {
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
Options options = new Options(); Options options = new Options();
@@ -394,7 +397,7 @@ public class XOAI {
String command = line.getArgs()[0]; String command = line.getArgs()[0];
if (COMMAND_IMPORT.equals(command)) { if (COMMAND_IMPORT.equals(command)) {
Context ctx = new Context(); ctx = new Context();
XOAI indexer = new XOAI(ctx, XOAI indexer = new XOAI(ctx,
line.hasOption('o'), line.hasOption('o'),
line.hasOption('c'), line.hasOption('c'),
@@ -404,21 +407,17 @@ public class XOAI {
int imported = indexer.index(); int imported = indexer.index();
if (imported > 0) cleanCache(itemCacheService, cacheService); if (imported > 0) cleanCache(itemCacheService, cacheService);
ctx.abort();
} else if (COMMAND_CLEAN_CACHE.equals(command)) { } else if (COMMAND_CLEAN_CACHE.equals(command)) {
cleanCache(itemCacheService, cacheService); cleanCache(itemCacheService, cacheService);
} else if (COMMAND_COMPILE_ITEMS.equals(command)) { } else if (COMMAND_COMPILE_ITEMS.equals(command)) {
Context ctx = new Context(); ctx = new Context();
XOAI indexer = new XOAI(ctx, line.hasOption('v')); XOAI indexer = new XOAI(ctx, line.hasOption('v'));
applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer);
indexer.compile(); indexer.compile();
cleanCache(itemCacheService, cacheService); cleanCache(itemCacheService, cacheService);
ctx.abort();
} else if (COMMAND_ERASE_COMPILED_ITEMS.equals(command)) { } else if (COMMAND_ERASE_COMPILED_ITEMS.equals(command)) {
cleanCompiledItems(itemCacheService); cleanCompiledItems(itemCacheService);
cleanCache(itemCacheService, cacheService); cleanCache(itemCacheService, cacheService);
@@ -436,6 +435,12 @@ public class XOAI {
} }
log.error(ex.getMessage(), ex); log.error(ex.getMessage(), ex);
} }
finally
{
// Abort our context, if still open
if(ctx!=null && ctx.isValid())
ctx.abort();
}
} }
private static void cleanCompiledItems(XOAIItemCacheService itemCacheService) throws IOException { private static void cleanCompiledItems(XOAIItemCacheService itemCacheService) throws IOException {

View File

@@ -145,7 +145,7 @@ public class DSpaceOAIDataProvider
} }
private void closeContext(Context context) { private void closeContext(Context context) {
if (context != null) if (context != null && context.isValid())
context.abort(); context.abort();
} }

View File

@@ -8,12 +8,10 @@
package org.dspace.xoai.filter; package org.dspace.xoai.filter;
import com.google.common.base.Function; import java.sql.SQLException;
import com.lyncode.builder.ListBuilder; import java.util.ArrayList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList; import java.util.List;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.log4j.LogManager; import org.apache.log4j.LogManager;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@@ -25,12 +23,12 @@ import org.dspace.xoai.exceptions.InvalidMetadataFieldException;
import org.dspace.xoai.filter.data.DSpaceMetadataFilterOperator; import org.dspace.xoai.filter.data.DSpaceMetadataFilterOperator;
import org.dspace.xoai.filter.results.DatabaseFilterResult; import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult; import org.dspace.xoai.filter.results.SolrFilterResult;
import org.dspace.xoai.services.api.database.FieldResolver;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException; import com.google.common.base.Function;
import java.util.ArrayList; import com.lyncode.builder.ListBuilder;
import java.util.List; import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType;
/** /**
* @author Lyncode Development Team <dspace@lyncode.com> * @author Lyncode Development Team <dspace@lyncode.com>
@@ -41,14 +39,6 @@ public class DSpaceAtLeastOneMetadataFilter extends DSpaceFilter {
private String field; private String field;
private DSpaceMetadataFilterOperator operator = DSpaceMetadataFilterOperator.UNDEF; private DSpaceMetadataFilterOperator operator = DSpaceMetadataFilterOperator.UNDEF;
private List<String> values; private List<String> values;
private ParameterMap configuration;
public DSpaceAtLeastOneMetadataFilter(ParameterMap configuration) {
this.configuration = configuration;
}
@Autowired
FieldResolver fieldResolver;
private String getField() { private String getField() {
if (field == null) { if (field == null) {
@@ -249,7 +239,4 @@ public class DSpaceAtLeastOneMetadataFilter extends DSpaceFilter {
} }
} }
public ParameterMap getConfiguration() {
return configuration;
}
} }

View File

@@ -8,11 +8,13 @@
package org.dspace.xoai.filter; package org.dspace.xoai.filter;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.LogManager; import org.apache.log4j.LogManager;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager; import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bundle;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -21,10 +23,6 @@ import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.filter.results.DatabaseFilterResult; import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult; import org.dspace.xoai.filter.results.SolrFilterResult;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/** /**
* *
* @author Lyncode Development Team <dspace@lyncode.com> * @author Lyncode Development Team <dspace@lyncode.com>
@@ -32,11 +30,6 @@ import java.util.List;
public class DSpaceAuthorizationFilter extends DSpaceFilter public class DSpaceAuthorizationFilter extends DSpaceFilter
{ {
private static Logger log = LogManager.getLogger(DSpaceAuthorizationFilter.class); private static Logger log = LogManager.getLogger(DSpaceAuthorizationFilter.class);
private Context context;
public DSpaceAuthorizationFilter (Context context) {
this.context = context;
}
@Override @Override
public DatabaseFilterResult buildDatabaseQuery(Context context) public DatabaseFilterResult buildDatabaseQuery(Context context)
@@ -54,29 +47,25 @@ public class DSpaceAuthorizationFilter extends DSpaceFilter
@Override @Override
public boolean isShown(DSpaceItem item) public boolean isShown(DSpaceItem item)
{ {
boolean pub = false;
try try
{ {
// If Handle or Item are not found, return false
String handle = DSpaceItem.parseHandle(item.getIdentifier()); String handle = DSpaceItem.parseHandle(item.getIdentifier());
if (handle == null) return false; if (handle == null)
return false;
Item dspaceItem = (Item) HandleManager.resolveToObject(context, handle); Item dspaceItem = (Item) HandleManager.resolveToObject(context, handle);
AuthorizeManager.authorizeAction(context, dspaceItem, Constants.READ); if (dspaceItem == null)
for (Bundle b : dspaceItem.getBundles()) return false;
AuthorizeManager.authorizeAction(context, b, Constants.READ);
return true; // Check if READ access allowed on Item
} pub = AuthorizeManager.authorizeActionBoolean(context, dspaceItem, Constants.READ);
catch (AuthorizeException ex)
{
log.error(ex.getMessage(), ex);
} }
catch (SQLException ex) catch (SQLException ex)
{ {
log.error(ex.getMessage(), ex); log.error(ex.getMessage(), ex);
} }
catch (Exception ex) return pub;
{
log.error(ex.getMessage(), ex);
}
return false;
} }
@Override @Override

View File

@@ -9,10 +9,13 @@ package org.dspace.xoai.filter;
import com.lyncode.xoai.dataprovider.data.Filter; import com.lyncode.xoai.dataprovider.data.Filter;
import com.lyncode.xoai.dataprovider.data.ItemIdentifier; import com.lyncode.xoai.dataprovider.data.ItemIdentifier;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.xoai.data.DSpaceItem; import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.filter.results.DatabaseFilterResult; import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult; import org.dspace.xoai.filter.results.SolrFilterResult;
import org.dspace.xoai.services.api.database.FieldResolver;
/** /**
* *
@@ -20,6 +23,15 @@ import org.dspace.xoai.filter.results.SolrFilterResult;
*/ */
public abstract class DSpaceFilter implements Filter public abstract class DSpaceFilter implements Filter
{ {
/** The configuration from xoai.xml file */
protected ParameterMap configuration;
/** The configuration from xoai.xml file */
protected FieldResolver fieldResolver;
/** The oai context */
protected Context context;
public abstract DatabaseFilterResult buildDatabaseQuery(Context context); public abstract DatabaseFilterResult buildDatabaseQuery(Context context);
public abstract SolrFilterResult buildSolrQuery(); public abstract SolrFilterResult buildSolrQuery();
public abstract boolean isShown(DSpaceItem item); public abstract boolean isShown(DSpaceItem item);
@@ -33,4 +45,55 @@ public abstract class DSpaceFilter implements Filter
} }
return false; return false;
} }
/**
* @return the configuration map if defined in xoai.xml, otherwise null.
*/
public ParameterMap getConfiguration()
{
return configuration;
}
/**
* @param configuration
* the configuration map to set
*/
public void setConfiguration(ParameterMap configuration)
{
this.configuration = configuration;
}
/**
* @return the fieldResolver
*/
public FieldResolver getFieldResolver()
{
return fieldResolver;
}
/**
* @param fieldResolver
* the fieldResolver to set
*/
public void setFieldResolver(FieldResolver fieldResolver)
{
this.fieldResolver = fieldResolver;
}
/**
* @return the context
*/
public Context getContext()
{
return context;
}
/**
* @param context
* the context to set
*/
public void setContext(Context context)
{
this.context = context;
}
} }

View File

@@ -7,9 +7,10 @@
*/ */
package org.dspace.xoai.filter; package org.dspace.xoai.filter;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap; import java.sql.SQLException;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue; import java.util.ArrayList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType; import java.util.List;
import org.apache.log4j.LogManager; import org.apache.log4j.LogManager;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.core.Constants; import org.dspace.core.Constants;
@@ -18,11 +19,9 @@ import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.exceptions.InvalidMetadataFieldException; import org.dspace.xoai.exceptions.InvalidMetadataFieldException;
import org.dspace.xoai.filter.results.DatabaseFilterResult; import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult; import org.dspace.xoai.filter.results.SolrFilterResult;
import org.dspace.xoai.services.api.database.FieldResolver;
import java.sql.SQLException; import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterValue;
import java.util.ArrayList; import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.SimpleType;
import java.util.List;
/** /**
* This filter allows one to retrieve (from the data source) those items * This filter allows one to retrieve (from the data source) those items
@@ -38,14 +37,7 @@ public class DSpaceMetadataExistsFilter extends DSpaceFilter {
private static Logger log = LogManager private static Logger log = LogManager
.getLogger(DSpaceMetadataExistsFilter.class); .getLogger(DSpaceMetadataExistsFilter.class);
private FieldResolver fieldResolver;
private List<String> fields; private List<String> fields;
private ParameterMap configuration;
public DSpaceMetadataExistsFilter(FieldResolver fieldResolver, ParameterMap configuration) {
this.fieldResolver = fieldResolver;
this.configuration = configuration;
}
private List<String> getFields() { private List<String> getFields() {
if (this.fields == null) { if (this.fields == null) {
@@ -114,7 +106,4 @@ public class DSpaceMetadataExistsFilter extends DSpaceFilter {
return new SolrFilterResult(cond.toString()); return new SolrFilterResult(cond.toString());
} }
public ParameterMap getConfiguration() {
return configuration;
}
} }

View File

@@ -48,11 +48,13 @@ public class DSpaceSetSpecFilter extends DSpaceFilter
{ {
try try
{ {
DSpaceObject dso = handleResolver.resolve(setSpec.replace("col_", "")); DSpaceObject dso = handleResolver.resolve(setSpec.replace("col_", "").replace("_", "/"));
if(dso != null){
return new DatabaseFilterResult( return new DatabaseFilterResult(
"EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id = ?)", "EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id = ?)",
dso.getID()); dso.getID());
} }
}
catch (Exception ex) catch (Exception ex)
{ {
log.error(ex.getMessage(), ex); log.error(ex.getMessage(), ex);
@@ -62,13 +64,15 @@ public class DSpaceSetSpecFilter extends DSpaceFilter
{ {
try try
{ {
DSpaceObject dso = handleResolver.resolve(setSpec.replace("com_", "")); DSpaceObject dso = handleResolver.resolve(setSpec.replace("com_", "").replace("_", "/"));
if(dso != null){
List<Integer> list = collectionsService.getAllSubCollections(dso.getID()); List<Integer> list = collectionsService.getAllSubCollections(dso.getID());
String subCollections = StringUtils.join(list.iterator(), ","); String subCollections = StringUtils.join(list.iterator(), ",");
return new DatabaseFilterResult( return new DatabaseFilterResult(
"EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id IN (" "EXISTS (SELECT tmp.* FROM collection2item tmp WHERE tmp.resource_id=i.item_id AND collection_id IN ("
+ subCollections + "))"); + subCollections + "))");
} }
}
catch (Exception e) catch (Exception e)
{ {
log.error(e.getMessage(), e); log.error(e.getMessage(), e);

View File

@@ -0,0 +1,64 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.xoai.filter;
import java.util.ArrayList;
import java.util.List;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.xoai.data.DSpaceItem;
import org.dspace.xoai.filter.results.DatabaseFilterResult;
import org.dspace.xoai.filter.results.SolrFilterResult;
/**
* Filter for Withdrawn items. Enabling this filter allows tombstones for
* withdrawn items to be accessible via OAI-PMH. This allows us to properly
* flag withdrawn items with a "deleted" status. For more info on OAI-PMH
* "deleted" status, see:
* http://www.openarchives.org/OAI/openarchivesprotocol.html#deletion
* <P>
* (Don't worry, a tombstone doesn't display the withdrawn item's metadata or files.)
*
* @author Tim Donohue
*/
public class DSpaceWithdrawnFilter extends DSpaceFilter {

    /**
     * Build the database-side filter for withdrawn items.
     *
     * @param context DSpace context (unused here; the filter is a constant
     *                expression with no bind parameters)
     * @return a DatabaseFilterResult matching only withdrawn items
     */
    @Override
    public DatabaseFilterResult buildDatabaseQuery(Context context)
    {
        // No bind parameters are needed for this filter.
        List<Object> params = new ArrayList<Object>();

        // Oracle has no BOOLEAN column type, so "withdrawn" is stored as 0/1
        // there, while PostgreSQL uses a true boolean.
        String filter = "i.withdrawn=TRUE";
        if (DatabaseManager.isOracle())
        {
            filter = "i.withdrawn=1";
        }
        return new DatabaseFilterResult(filter, params);
    }

    /**
     * Decide whether an item should be shown via OAI-PMH.
     *
     * @param item the item under consideration
     * @return true if the item is withdrawn ("deleted" in OAI-PMH terms)
     */
    @Override
    public boolean isShown(DSpaceItem item)
    {
        // For DSpace, if an Item is withdrawn, "isDeleted()" will be true.
        // In this scenario, we want a withdrawn item to be *shown* so that
        // we can properly respond with a "deleted" status via OAI-PMH.
        // Don't worry, this does NOT make the metadata public for withdrawn
        // items, it merely provides an item "tombstone" via OAI-PMH.
        return item.isDeleted();
    }

    /**
     * Build the Solr-side filter for withdrawn items.
     *
     * @return a SolrFilterResult matching documents flagged as deleted
     */
    @Override
    public SolrFilterResult buildSolrQuery()
    {
        // In Solr, we store withdrawn items as "deleted".
        // See org.dspace.xoai.app.XOAI, index(Item) method.
        return new SolrFilterResult("item.deleted:true");
    }
}

View File

@@ -28,7 +28,7 @@ public class NotFilter extends DSpaceFilter {
@Override @Override
public SolrFilterResult buildSolrQuery() { public SolrFilterResult buildSolrQuery() {
return new SolrFilterResult("NOT("+inFilter.buildSolrQuery()+")"); return new SolrFilterResult("NOT("+inFilter.buildSolrQuery().getQuery()+")");
} }
@Override @Override

View File

@@ -36,7 +36,7 @@ public class OrFilter extends DSpaceFilter {
@Override @Override
public SolrFilterResult buildSolrQuery() { public SolrFilterResult buildSolrQuery() {
return new SolrFilterResult("("+left.buildSolrQuery()+") OR ("+right.buildSolrQuery()+")"); return new SolrFilterResult("("+left.buildSolrQuery().getQuery()+") OR ("+right.buildSolrQuery().getQuery()+")");
} }
@Override @Override

View File

@@ -52,12 +52,18 @@ public class DSpaceDatabaseQueryResolver implements DatabaseQueryResolver {
} }
countParameters.addAll(parameters); countParameters.addAll(parameters);
String whereInArchive = "WHERE i.in_archive=true";
if(DatabaseManager.isOracle())
{
whereInArchive = "WHERE i.in_archive=1";
}
if (!where.equals("")) { if (!where.equals("")) {
query += " WHERE i.in_archive=true AND " + where; query += " " + whereInArchive + " AND " + where;
countQuery += " WHERE i.in_archive=true AND " + where; countQuery += " " + whereInArchive + " AND " + where;
} else { } else {
query += " WHERE i.in_archive=true"; query += " " + whereInArchive;
countQuery += " WHERE i.in_archive=true"; countQuery += " " + whereInArchive;
} }
query += " ORDER BY i.item_id"; query += " ORDER BY i.item_id";

View File

@@ -79,20 +79,25 @@ public class BaseDSpaceFilterResolver implements DSpaceFilterResolver {
@Override @Override
public Filter getFilter(Class<? extends Filter> filterClass, ParameterMap configuration) { public Filter getFilter(Class<? extends Filter> filterClass, ParameterMap configuration) {
if (filterClass.isAssignableFrom(DSpaceAtLeastOneMetadataFilter.class)) { Filter result = null;
return new DSpaceAtLeastOneMetadataFilter(configuration); try
} else if (filterClass.isAssignableFrom(DSpaceAuthorizationFilter.class)) { {
try { result = filterClass.newInstance();
return new DSpaceAuthorizationFilter(contextService.getContext()); if (result instanceof DSpaceFilter)
} catch (ContextServiceException e) { {
LOGGER.error(e.getMessage(), e); // add the DSpace filter specific objects
return null; ((DSpaceFilter) result).setConfiguration(configuration);
((DSpaceFilter) result).setContext(contextService.getContext());
((DSpaceFilter) result).setFieldResolver(fieldResolver);
} }
} else if (filterClass.isAssignableFrom(DSpaceMetadataExistsFilter.class)) {
return new DSpaceMetadataExistsFilter(fieldResolver, configuration);
} }
LOGGER.error("Filter "+filterClass.getName()+" unknown instantiation"); catch (InstantiationException | IllegalAccessException
return null; | ContextServiceException e)
{
LOGGER.error("Filter " + filterClass.getName()
+ " could not be instantiated", e);
}
return result;
} }
@Override @Override

View File

@@ -83,7 +83,7 @@ public class DSpaceRepositoryConfiguration implements RepositoryConfiguration
@Override @Override
public DeleteMethod getDeleteMethod() public DeleteMethod getDeleteMethod()
{ {
return DeleteMethod.PERSISTENT; return DeleteMethod.TRANSIENT;
} }
@Override @Override

View File

@@ -25,35 +25,49 @@ public class DateUtils
private static Logger log = LogManager.getLogger(DateUtils.class); private static Logger log = LogManager.getLogger(DateUtils.class);
/**
* Format a Date object as a valid UTC Date String, per OAI-PMH guidelines
* http://www.openarchives.org/OAI/openarchivesprotocol.html#DatestampsResponses
*
* @param date Date object
* @return UTC date string
*/
public static String format(Date date) public static String format(Date date)
{ {
return format(date, true); // NOTE: OAI-PMH REQUIRES that all dates be expressed in UTC format
} // as YYYY-MM-DDThh:mm:ssZ For more details, see
public static String format(Date date, boolean init) // http://www.openarchives.org/OAI/openarchivesprotocol.html#DatestampsResponses
{ SimpleDateFormat sdf = new SimpleDateFormat(
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.'000Z'"); "yyyy-MM-dd'T'HH:mm:ss'Z'");
if (!init) sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.'999Z'");
// We indicate that the returned date is in Zulu time (UTC) so we have // We indicate that the returned date is in Zulu time (UTC) so we have
// to set the time zone of sdf correct. // to set the time zone of sdf correctly
sdf.setTimeZone(TimeZone.getTimeZone("ZULU")); sdf.setTimeZone(TimeZone.getTimeZone("ZULU"));
String ret = sdf.format(date); String ret = sdf.format(date);
return ret; return ret;
} }
/**
* Parse a string into a Date object
* @param date string to parse
* @return Date
*/
public static Date parse(String date) public static Date parse(String date)
{ {
// 2008-01-01T00:00:00Z // First try to parse as a full UTC date/time, e.g. 2008-01-01T00:00:00Z
SimpleDateFormat format = new SimpleDateFormat( SimpleDateFormat format = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.getDefault()); "yyyy-MM-dd'T'HH:mm:ss'Z'");
// format.setTimeZone(TimeZone.getTimeZone("ZULU")); format.setTimeZone(TimeZone.getTimeZone("ZULU"));
Date ret; Date ret;
try try
{ {
ret = format.parse(date); ret = format.parse(date);
return ret; return ret;
} }
catch (ParseException e) catch (ParseException ex)
{ {
// If a parse exception, try other logical date/time formats
// based on the local timezone
format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss",
Locale.getDefault()); Locale.getDefault());
try try
@@ -62,7 +76,8 @@ public class DateUtils
} }
catch (ParseException e1) catch (ParseException e1)
{ {
format = new SimpleDateFormat("yyyy-MM-dd", Locale.getDefault()); format = new SimpleDateFormat("yyyy-MM-dd",
Locale.getDefault());
try try
{ {
return format.parse(date); return format.parse(date);
@@ -85,7 +100,7 @@ public class DateUtils
} }
catch (ParseException e4) catch (ParseException e4)
{ {
log.error(e4.getMessage(), e); log.error(e4.getMessage(), e4);
} }
} }
} }
@@ -96,7 +111,9 @@ public class DateUtils
public static Date parseFromSolrDate(String date) public static Date parseFromSolrDate(String date)
{ {
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.getDefault()); SimpleDateFormat format = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'");
format.setTimeZone(TimeZone.getTimeZone("ZULU"));
Date ret; Date ret;
try try
{ {

View File

@@ -13,7 +13,8 @@
xmlns:lyn="http://www.lyncode.com/fakeNamespace" xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/" xmlns:lyn="http://www.lyncode.com/fakeNamespace" xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
xmlns:dc="http://purl.org/dc/doc:elements/1.1/" xmlns:dc="http://purl.org/dc/doc:elements/1.1/"
xmlns:verb="http://informatik.hu-berlin.de/xmlverbatim" xmlns:verb="http://informatik.hu-berlin.de/xmlverbatim"
exclude-result-prefixes="oai lyn oai_dc dc verb"> xmlns:oai_id="http://www.openarchives.org/OAI/2.0/oai-identifier"
exclude-result-prefixes="oai lyn oai_dc dc verb oai_id">
<xsl:output method="html" doctype-public="-//W3C//DTD HTML 4.01//EN" doctype-system="http://www.w3.org/TR/html4/strict.dtd" /> <xsl:output method="html" doctype-public="-//W3C//DTD HTML 4.01//EN" doctype-system="http://www.w3.org/TR/html4/strict.dtd" />
@@ -167,14 +168,14 @@
<tr> <tr>
<td><b>Repository identifier</b></td> <td><b>Repository identifier</b></td>
<td> <td>
<xsl:value-of select="oai:description/oai:oai-identifier/oai:repositoryIdentifier/text()" /> <xsl:value-of select="oai:description/oai_id:oai-identifier/oai_id:repositoryIdentifier/text()" />
</td> </td>
</tr> </tr>
<tr> <tr>
<td><b>Sample identifier</b></td> <td><b>Sample identifier</b></td>
<td> <td>
<xsl:value-of <xsl:value-of
select="oai:description/oai:oai-identifier/oai:sampleIdentifier/text()" /> select="oai:description/oai_id:oai-identifier/oai_id:sampleIdentifier/text()" />
</td> </td>
</tr> </tr>
<tr> <tr>
@@ -284,11 +285,15 @@
<h5>Identifier <small><xsl:value-of select="oai:header/oai:identifier/text()"></xsl:value-of></small></h5> <h5>Identifier <small><xsl:value-of select="oai:header/oai:identifier/text()"></xsl:value-of></small></h5>
</div> </div>
<div class="col-lg-6"> <div class="col-lg-6">
<h5>Last Modfied <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5> <h5>Last Modified <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
</div> </div>
</div> </div>
</div> </div>
<div class="panel-body"> <div class="panel-body">
<!-- If this record has a "status", display it as a warning -->
<xsl:if test="oai:header/@status">
<div class="alert alert-warning">Record Status: <xsl:value-of select="oai:header/@status"/></div>
</xsl:if>
<div class="panel panel-success"> <div class="panel panel-success">
<a data-toggle="collapse"> <a data-toggle="collapse">
<xsl:attribute name="href">#sets<xsl:value-of select="translate(oai:header/oai:identifier/text(), ':/.', '')"></xsl:value-of></xsl:attribute> <xsl:attribute name="href">#sets<xsl:value-of select="translate(oai:header/oai:identifier/text(), ':/.', '')"></xsl:value-of></xsl:attribute>
@@ -349,11 +354,15 @@
<h5>Identifier <small><xsl:value-of select="oai:header/oai:identifier/text()"></xsl:value-of></small></h5> <h5>Identifier <small><xsl:value-of select="oai:header/oai:identifier/text()"></xsl:value-of></small></h5>
</div> </div>
<div class="col-lg-6"> <div class="col-lg-6">
<h5>Last Modfied <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5> <h5>Last Modified <small><xsl:value-of select="translate(oai:header/oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
</div> </div>
</div> </div>
</div> </div>
<div class="panel-body"> <div class="panel-body">
<!-- If this record has a "status", display it as a warning -->
<xsl:if test="oai:header/@status">
<div class="alert alert-warning">Record Status: <xsl:value-of select="oai:header/@status"/></div>
</xsl:if>
<div class="panel panel-success"> <div class="panel panel-success">
<div class="panel-heading"> <div class="panel-heading">
<h5 class="panel-title"> <h5 class="panel-title">
@@ -409,7 +418,7 @@
<h5>Identifier <small><xsl:value-of select="oai:identifier/text()"></xsl:value-of></small></h5> <h5>Identifier <small><xsl:value-of select="oai:identifier/text()"></xsl:value-of></small></h5>
</div> </div>
<div class="col-lg-4"> <div class="col-lg-4">
<h5>Last Modfied <small><xsl:value-of select="translate(oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5> <h5>Last Modified <small><xsl:value-of select="translate(oai:datestamp/text(), 'TZ', ' ')"></xsl:value-of></small></h5>
</div> </div>
<div class="col-lg-4"> <div class="col-lg-4">
<a class="btn btn-default pull-right"> <a class="btn btn-default pull-right">
@@ -422,6 +431,10 @@
</div> </div>
</div> </div>
<div class="panel-body"> <div class="panel-body">
<!-- If this record has a "status", display it as a warning -->
<xsl:if test="@status">
<div class="alert alert-warning">Record Status: <xsl:value-of select="@status"/></div>
</xsl:if>
<div class="panel panel-success"> <div class="panel panel-success">
<a data-toggle="collapse"> <a data-toggle="collapse">
<xsl:attribute name="href">#sets<xsl:value-of select="translate(oai:identifier/text(), ':/.', '')"></xsl:value-of></xsl:attribute> <xsl:attribute name="href">#sets<xsl:value-of select="translate(oai:identifier/text(), ':/.', '')"></xsl:value-of></xsl:attribute>
@@ -500,7 +513,7 @@
<div class="text-center"> <div class="text-center">
<a class="btn btn-primary"> <a class="btn btn-primary">
<xsl:attribute name="href"> <xsl:attribute name="href">
<xsl:value-of select="concat(/oai:OAI-PMH/oai:request/text(), '?verb=ListSets&amp;resumptionToken=', text())"></xsl:value-of> <xsl:value-of select="concat(/oai:OAI-PMH/oai:request/text(), '?verb=',/oai:OAI-PMH/oai:request/@verb,'&amp;resumptionToken=', text())"></xsl:value-of>
</xsl:attribute> </xsl:attribute>
Show More Show More
</a> </a>

View File

@@ -7,16 +7,13 @@
*/ */
package org.dspace.xoai.tests.unit.services.impl.database; package org.dspace.xoai.tests.unit.services.impl.database;
import com.lyncode.builder.DateBuilder; import static org.hamcrest.CoreMatchers.is;
import com.lyncode.xoai.dataprovider.data.Filter; import static org.junit.Assert.assertThat;
import com.lyncode.xoai.dataprovider.filter.Scope;
import com.lyncode.xoai.dataprovider.filter.ScopedFilter; import java.util.ArrayList;
import com.lyncode.xoai.dataprovider.filter.conditions.AndCondition; import java.util.Date;
import com.lyncode.xoai.dataprovider.filter.conditions.Condition; import java.util.List;
import com.lyncode.xoai.dataprovider.filter.conditions.CustomCondition;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.StringValue;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.xoai.filter.DSpaceMetadataExistsFilter; import org.dspace.xoai.filter.DSpaceMetadataExistsFilter;
import org.dspace.xoai.filter.DSpaceSetSpecFilter; import org.dspace.xoai.filter.DSpaceSetSpecFilter;
@@ -29,12 +26,15 @@ import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import java.util.ArrayList; import com.lyncode.builder.DateBuilder;
import java.util.Date; import com.lyncode.xoai.dataprovider.data.Filter;
import java.util.List; import com.lyncode.xoai.dataprovider.filter.Scope;
import com.lyncode.xoai.dataprovider.filter.ScopedFilter;
import static org.hamcrest.CoreMatchers.is; import com.lyncode.xoai.dataprovider.filter.conditions.AndCondition;
import static org.junit.Assert.assertThat; import com.lyncode.xoai.dataprovider.filter.conditions.Condition;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterList;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.ParameterMap;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.parameters.StringValue;
public class DSpaceDatabaseQueryResolverTest extends AbstractQueryResolverTest { public class DSpaceDatabaseQueryResolverTest extends AbstractQueryResolverTest {
private static final Date DATE = new Date(); private static final Date DATE = new Date();
@@ -110,10 +110,17 @@ public class DSpaceDatabaseQueryResolverTest extends AbstractQueryResolverTest {
.withValue(FIELD_1) .withValue(FIELD_1)
.withName("fields")); .withName("fields"));
scopedFilters.add(new ScopedFilter(new CustomCondition(getFilterResolver(), final DSpaceMetadataExistsFilter metadataExistsFilter = new DSpaceMetadataExistsFilter();
DSpaceMetadataExistsFilter.class, metadataExistsFilter.setConfiguration(filterConfiguration);
filterConfiguration), metadataExistsFilter.setFieldResolver(theFieldResolver());
Scope.Query)); scopedFilters.add(new ScopedFilter(new Condition()
{
@Override
public Filter getFilter()
{
return metadataExistsFilter;
}
}, Scope.Query));
DatabaseQuery result = underTest.buildQuery(scopedFilters, START, LENGTH); DatabaseQuery result = underTest.buildQuery(scopedFilters, START, LENGTH);
@@ -134,10 +141,17 @@ public class DSpaceDatabaseQueryResolverTest extends AbstractQueryResolverTest {
) )
.withName("fields")); .withName("fields"));
scopedFilters.add(new ScopedFilter(new CustomCondition(getFilterResolver(), final DSpaceMetadataExistsFilter metadataExistsFilter = new DSpaceMetadataExistsFilter();
DSpaceMetadataExistsFilter.class, metadataExistsFilter.setConfiguration(filterConfiguration);
filterConfiguration), metadataExistsFilter.setFieldResolver(theFieldResolver());
Scope.Query)); scopedFilters.add(new ScopedFilter(new Condition()
{
@Override
public Filter getFilter()
{
return metadataExistsFilter;
}
}, Scope.Query));
DatabaseQuery result = underTest.buildQuery(scopedFilters, START, LENGTH); DatabaseQuery result = underTest.buildQuery(scopedFilters, START, LENGTH);

View File

@@ -9,7 +9,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -3,73 +3,190 @@
A RESTful web services API for DSpace, built using JAX-RS1 JERSEY. A RESTful web services API for DSpace, built using JAX-RS1 JERSEY.
##Getting Started ##Getting Started
This REST API is integrated directly into the DSpace code-base. This REST API is integrated directly into the DSpace codebase.
* Rebuild as normal: mvn + ant * Rebuild as usual: mvn + ant
* Deploy the webapp (i.e to tomcat) * Deploy the webapp (i.e to Tomcat)
* ```<Context path="/rest" docBase="/dspace/webapps/rest" allowLinking="true"/>``` * ```<Context path="/rest" docBase="/dspace/webapps/rest" />```
At this point, this is a READ ONLY API for DSpace, for the anonymous user. Only Anonymous READ Communities, Collections, Items, and Bitstreams are available. REST API can do all CRUD (create, read, update, delete) operations over communities, collections, items, bitstream and bitstream policies. Without logging into the REST API, you have read access as an anonymous user (member of the Anonymous group). If you want to make changes in DSpace using the REST API, you must log into the API using the "login" endpoint and then use the returned token in request header of your subsequent API calls.
##Endpoints ##Endpoints
| Resource |CREATE|READ list|READ single|Edit|Delete|Search| | Resource |CREATE|READ list|READ single|Edit|Delete|Search|
| ------------- |------|:-------:|-----------|----|------|------| | ------------- |------|:-------:|-----------|----|------|------|
| /communities | | Y | Y | | | | | /communities | Y | Y | Y | Y | Y | |
| /collections | | Y | Y | | | | | /collections | Y | Y | Y | Y | Y | Y |
| /items | | | Y | | | | | /items | Y | Y | Y | Y | Y | Y |
| /bitstreams | | | Y | | | || | /bitstreams | Y | Y | Y | Y | Y | ||
Search in collections is possible only by name and search in items only by metadata field.
###Index
Get information on how to use the API
- GET http://localhost:8080
Test whether the REST API is running and available
- GET http://localhost:8080/rest/test
Log into REST API
- POST http://localhost:8080/rest/login
Logout from REST API
- POST http://localhost:8080/rest/logout
Get status of REST API and the logged-in user
- GET http://localhost:8080/rest/status
###Communities ###Communities
View the list of top-level communities View the list of top-level communities
- http://localhost:8080/rest/communities - GET http://localhost:8080/rest/communities/top-communities
View the list of all communities
- GET http://localhost:8080/rest/communities[?expand={collections,parentCommunity,subCommunities,logo,all}]
View a specific community View a specific community
- http://localhost:8080/rest/communities/:ID - GET http://localhost:8080/rest/communities/:ID[?expand={collections,parentCommunity,subCommunities,logo,all}]
View the list of subcollections in community
- GET http://localhost:8080/rest/communities/:ID/collections[?expand={items,parentCommunityList,license,logo,all}]
View the list of subcommunities in community
- GET http://localhost:8080/rest/communities/:ID/communities[?expand={collections,parentCommunity,subCommunities,logo,all}]
Create new top-level community
- POST http://localhost:8080/rest/communities
Create new subcollection in community
- POST http://localhost:8080/rest/communities/:ID/collections
Create new subcommunity in community
- POST http://localhost:8080/rest/communities/:ID/communities
Update community
- PUT http://localhost:8080/rest/communities/:ID
Delete community
- DELETE http://localhost:8080/rest/communities/:ID
Delete subcollection in community
- DELETE http://localhost:8080/rest/communities/:ID/collections/:ID
Delete subcommunity in community
- DELETE http://localhost:8080/rest/communities/:ID/communities/:ID
View a specific community, list its subcommunities, and subcollections
- http://localhost:8080/rest/communities/:ID?expand=all
###Collections ###Collections
View the list of collections View the list of collections
- http://localhost:8080/rest/collections - GET http://localhost:8080/rest/collections[?expand={items,parentCommunityList,license,logo,all}]
View a specific collection View a specific collection
- http://localhost:8080/rest/collections/:ID - GET http://localhost:8080/rest/collections/:ID[?expand={items,parentCommunityList,license,logo,all}]
View items in collection
- GET http://localhost:8080/rest/collections/:ID/items[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}]
Create item in collection
- POST http://localhost:8080/rest/collections/:ID/items
Find collection by name
- POST http://localhost:8080/rest/collections/find-collection
Update collection
- PUT http://localhost:8080/rest/collections/:ID
Delete collection
- DELETE http://localhost:8080/rest/collections/:ID
Delete item in collection
- DELETE http://localhost:8080/rest/collections/:ID/items/:ID
View a specific collection, and its items
- http://localhost:8080/rest/collections/:ID?expand=all
###Items ###Items
View an Item, and see its bitstreams View the list of items
- http://localhost:8080/rest/items/:ID - GET http://localhost:8080/rest/items[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}]
View specific item
- GET http://localhost:8080/rest/items/:ID[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}]
View an Item and view its bitstreams
- GET http://localhost:8080/rest/items/:ID/bitstreams[?expand={parent,policies,all}]
View an Item, and view its metadata
- GET http://localhost:8080/rest/items/:ID/metadata
Find item by metadata
- POST http://localhost:8080/rest/items/find-by-metadata-field
Add metadata to item
- POST http://localhost:8080/rest/items/:ID/metadata
Create bitstream in item
- POST http://localhost:8080/rest/items/:ID/bitstreams
Update metadata in item
- PUT http://localhost:8080/rest/items/:ID/metadata
Delete item
- DELETE http://localhost:8080/rest/items/:ID
Delete all metadata in item
- DELETE http://localhost:8080/rest/items/:ID/metadata
Delete bitstream in item
- DELETE http://localhost:8080/rest/items/:ID/bitstreams/:ID
###Bitstreams ###Bitstreams
View the list of bitstreams
- GET http://localhost:8080/rest/bitstreams[?expand={parent,policies,all}]
View information about a bitstream View information about a bitstream
- http://localhost:8080/rest/bitstreams/:ID - GET http://localhost:8080/rest/bitstreams/:ID[?expand={parent,policies,all}]
View/Download a specific Bitstream View/Download a specific Bitstream
- http://localhost:8080/rest/bitstreams/:ID/retrieve - GET http://localhost:8080/rest/bitstreams/:ID/retrieve
View the list of policies of bitstream
- GET http://localhost:8080/rest/bitstreams/:ID/policy
Add policy to bitstream
- POST http://localhost:8080/rest/bitstreams/:ID/policy
Update bitstream
- PUT http://localhost:8080/rest/bitstreams/:ID
Update data of bitstream
- PUT http://localhost:8080/rest/bitstreams/:ID/data
Delete bitstream
- DELETE http://localhost:8080/rest/bitstreams/:ID
Delete policy of bitstream
- DELETE http://localhost:8080/rest/bitstreams/:ID/policy/:ID
####Statistics ####Statistics
Recording of statistics for view of items or download of bitstreams (set stats = true in rest.cfg to enable stats recording) Recording view events of items and download events of bitstreams (set stats = true in rest.cfg to enable recording of events)
http://localhost:8080/rest/items/:ID?userIP=ip&userAgent=userAgent&xforwardedfor=xforwardedfor http://localhost:8080/rest/items/:ID?userIP=ip&userAgent=userAgent&xforwardedfor=xforwardedfor
If no parameters are given the details of httprequest sender are used in statistics. If no parameters are given, the details of the HTTP request sender are used in statistics.
This enables tools to record the details of their user rather then themselves. This enables tools like proxies to supply the details of their user rather than themselves.
###Handles ###Handles
Lookup a DSpaceObject by its Handle, this produces the name/ID, that you lookup in /bitstreams, /items, /collections, /communities Lookup a DSpaceObject by its Handle, this produces the name/ID that you look up in /bitstreams, /items, /collections, /communities
- http://localhost:8080/rest/handle/{prefix}/{suffix} - http://localhost:8080/rest/handle/{prefix}/{suffix}
##Expand ##Expand
There is an ?expand= query parameter for more expensive operations. You can tack it on the end of endpoints. There is an ?expand= query parameter for more expensive operations. You can add it at the end of the request URL.
It is optional, all, some or none. The response will usually indicate what the available "expand" options are. It is optional, all, some or none. The response will usually indicate what the available "expand" options are.
##HTTP Responses ##HTTP Responses
* 200 OK - We have the requested object/objects * 200 OK - The requested object/objects exists
* 401 Unauthorized - The anonymous user does not have READ access to that object * 401 Unauthorized - The anonymous user does not have READ access to that object
* 404 Not Found - That object doesn't exist * 404 Not Found - The specified object doesn't exist
* 405 Method Not Allowed - Wrong request method (GET,POST,PUT,DELETE) or wrong data format (JSON/XML).
* 415 Unsupported Media Type - Missing "Content-Type: application/json" or "Content-Type: application/xml" request header
* 500 Server Error - Likely a SQLException, IOException, more details in the logs. * 500 Server Error - Likely a SQLException, IOException, more details in the logs.

View File

@@ -3,7 +3,7 @@
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-rest</artifactId> <artifactId>dspace-rest</artifactId>
<packaging>war</packaging> <packaging>war</packaging>
<version>5.0</version> <version>5.3</version>
<name>DSpace REST :: API and Implementation</name> <name>DSpace REST :: API and Implementation</name>
<description>DSpace RESTful Web Services API</description> <description>DSpace RESTful Web Services API</description>
<url>http://demo.dspace.org</url> <url>http://demo.dspace.org</url>
@@ -11,7 +11,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -146,25 +146,16 @@ public class BitstreamResource extends Resource
log.info("Reading bitstream(id=" + bitstreamId + ") policies."); log.info("Reading bitstream(id=" + bitstreamId + ") policies.");
org.dspace.core.Context context = null; org.dspace.core.Context context = null;
List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>(); ResourcePolicy[] policies = null;
try try
{ {
context = createContext(getUser(headers)); context = createContext(getUser(headers));
org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.READ); org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.READ);
AuthorizeManager.getPolicies(context, dspaceBitstream);
policies = new Bitstream(dspaceBitstream,"policies").getPolicies();
Bundle[] bundles = dspaceBitstream.getBundles();
for (Bundle bundle : bundles)
{
List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies();
for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies)
{
if (policy.getResourceID() == bitstreamId)
{
policies.add(new ResourcePolicy(policy));
}
}
}
context.complete(); context.complete();
log.trace("Policies for bitstream(id=" + bitstreamId + ") was successfully read."); log.trace("Policies for bitstream(id=" + bitstreamId + ") was successfully read.");
@@ -184,7 +175,7 @@ public class BitstreamResource extends Resource
processFinally(context); processFinally(context);
} }
return policies.toArray(new ResourcePolicy[0]); return policies;
} }
/** /**
@@ -359,42 +350,26 @@ public class BitstreamResource extends Resource
*/ */
@POST @POST
@Path("/{bitstream_id}/policy") @Path("/{bitstream_id}/policy")
@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public javax.ws.rs.core.Response addBitstreamPolicy(@PathParam("bitstream_id") Integer bitstreamId, ResourcePolicy policy, public javax.ws.rs.core.Response addBitstreamPolicy(@PathParam("bitstream_id") Integer bitstreamId, ResourcePolicy policy,
@Context HttpHeaders headers) @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
@QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, @Context HttpServletRequest request)
throws WebApplicationException
{ {
log.info("Adding bitstream(id=" + bitstreamId + ") READ policy with permission for group(id=" + policy.getGroupId() log.info("Adding bitstream(id=" + bitstreamId + ") " + policy.getAction() + " policy with permission for group(id=" + policy.getGroupId()
+ ")."); + ").");
org.dspace.core.Context context = null; org.dspace.core.Context context = null;
try try
{ {
context = createContext(getUser(headers)); context = createContext(getUser(headers));
org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.READ); org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.WRITE);
Bundle[] bundles = dspaceBitstream.getBundles(); writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers,
request, context);
for (Bundle bundle : bundles) addPolicyToBitstream(context, policy, dspaceBitstream);
{
List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies();
org.dspace.authorize.ResourcePolicy dspacePolicy = org.dspace.authorize.ResourcePolicy.create(context);
dspacePolicy.setAction(policy.getActionInt());
dspacePolicy.setGroup(Group.find(context, policy.getGroupId()));
dspacePolicy.setResourceID(dspaceBitstream.getID());
dspacePolicy.setResource(dspaceBitstream);
dspacePolicy.setResourceType(org.dspace.core.Constants.BITSTREAM);
dspacePolicy.setStartDate(policy.getStartDate());
dspacePolicy.setEndDate(policy.getEndDate());
dspacePolicy.setRpDescription(policy.getRpDescription());
dspacePolicy.setRpName(policy.getRpName());
dspacePolicy.update();
// dspacePolicy.setRpType(org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM);
bitstreamsPolicies.add(dspacePolicy);
bundle.replaceAllBitstreamPolicies(bitstreamsPolicies);
bundle.update();
}
context.complete(); context.complete();
log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully added."); log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully added.");
@@ -485,43 +460,14 @@ public class BitstreamResource extends Resource
if (bitstream.getPolicies() != null) if (bitstream.getPolicies() != null)
{ {
Bundle[] bundles = dspaceBitstream.getBundles(); log.trace("Updating bitstream policies.");
ResourcePolicy[] policies = bitstream.getPolicies();
for (Bundle bundle : bundles) // Remove all old bitstream policies.
{ AuthorizeManager.removeAllPolicies(context,dspaceBitstream);
List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies();
// Remove old bitstream policies
List<org.dspace.authorize.ResourcePolicy> policiesToRemove = new ArrayList<org.dspace.authorize.ResourcePolicy>();
for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies)
{
if (policy.getResourceID() == dspaceBitstream.getID())
{
policiesToRemove.add(policy);
}
}
for (org.dspace.authorize.ResourcePolicy policy : policiesToRemove)
{
bitstreamsPolicies.remove(policy);
}
// Add all new bitstream policies // Add all new bitstream policies
for (ResourcePolicy policy : policies) for (ResourcePolicy policy : bitstream.getPolicies()) {
{ addPolicyToBitstream(context, policy, dspaceBitstream);
org.dspace.authorize.ResourcePolicy dspacePolicy = org.dspace.authorize.ResourcePolicy.create(context);
dspacePolicy.setAction(policy.getActionInt());
dspacePolicy.setGroup(Group.find(context, policy.getGroupId()));
dspacePolicy.setResourceID(dspaceBitstream.getID());
dspacePolicy.setResource(dspaceBitstream);
dspacePolicy.setResourceType(org.dspace.core.Constants.BITSTREAM);
dspacePolicy.setStartDate(policy.getStartDate());
dspacePolicy.setEndDate(policy.getEndDate());
dspacePolicy.setRpDescription(policy.getRpDescription());
dspacePolicy.setRpName(policy.getRpName());
dspacePolicy.update();
bitstreamsPolicies.add(dspacePolicy);
}
bundle.replaceAllBitstreamPolicies(bitstreamsPolicies);
bundle.update();
} }
} }
@@ -730,55 +676,52 @@ public class BitstreamResource extends Resource
@DELETE @DELETE
@Path("/{bitstream_id}/policy/{policy_id}") @Path("/{bitstream_id}/policy/{policy_id}")
public javax.ws.rs.core.Response deleteBitstreamPolicy(@PathParam("bitstream_id") Integer bitstreamId, public javax.ws.rs.core.Response deleteBitstreamPolicy(@PathParam("bitstream_id") Integer bitstreamId,
@PathParam("policy_id") Integer policyId, @Context HttpHeaders headers) @PathParam("policy_id") Integer policyId, @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
@QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, @Context HttpServletRequest request)
throws WebApplicationException
{ {
log.info("Deleting policy(id=" + policyId + ") from bitstream(id=" + bitstreamId + ").");
log.info("Deleting bitstream(id=" + bitstreamId + ") READ policy(id=" + policyId + ").");
org.dspace.core.Context context = null; org.dspace.core.Context context = null;
try try
{ {
context = createContext(getUser(headers)); context = createContext(getUser(headers));
org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.READ); org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, org.dspace.core.Constants.WRITE);
Bundle[] bundles = dspaceBitstream.getBundles(); writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers,
request, context);
for (Bundle bundle : bundles) // Check if resource policy exists in bitstream.
{ boolean found = false;
List<org.dspace.authorize.ResourcePolicy> bitstreamsPolicies = bundle.getBitstreamPolicies(); List<org.dspace.authorize.ResourcePolicy> policies = AuthorizeManager.getPolicies(context, dspaceBitstream);
for(org.dspace.authorize.ResourcePolicy policy : policies) {
for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies) if(policy.getID() == policyId) {
{ found = true;
if (policy.getID() == policyId.intValue())
{
bitstreamsPolicies.remove(policy);
break; break;
} }
} }
bundle.replaceAllBitstreamPolicies(bitstreamsPolicies); if(found) {
bundle.update(); removePolicyFromBitstream(context, policyId, bitstreamId);
} else {
context.abort();
throw new WebApplicationException(Response.Status.NOT_FOUND);
} }
context.complete(); context.complete();
log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully added."); log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully removed.");
} }
catch (SQLException e) catch (SQLException e)
{ {
processException("Someting went wrong while deleting READ policy(id=" + policyId + ") to bitstream(id=" + bitstreamId processException("Someting went wrong while deleting policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
+ "), SQLException! Message: " + e, context); + "), SQLException! Message: " + e, context);
} }
catch (ContextException e) catch (ContextException e)
{ {
processException("Someting went wrong while deleting READ policy(id=" + policyId + ") to bitstream(id=" + bitstreamId processException("Someting went wrong while deleting policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
+ "), ContextException. Message: " + e.getMessage(), context); + "), ContextException. Message: " + e.getMessage(), context);
} }
catch (AuthorizeException e)
{
processException("Someting went wrong while deleting READ policy(id=" + policyId + ") to bitstream(id=" + bitstreamId
+ "), AuthorizeException! Message: " + e, context);
}
finally finally
{ {
processFinally(context); processFinally(context);
@@ -799,6 +742,41 @@ public class BitstreamResource extends Resource
return URLConnection.guessContentTypeFromName(name); return URLConnection.guessContentTypeFromName(name);
} }
/**
* Add policy(org.dspace.rest.common.ResourcePolicy) to bitstream.
* @param context Context to create DSpace ResourcePolicy.
* @param policy Policy which will be added to bitstream.
* @param dspaceBitstream
* @throws SQLException
* @throws AuthorizeException
*/
private void addPolicyToBitstream(org.dspace.core.Context context, ResourcePolicy policy, org.dspace.content.Bitstream dspaceBitstream) throws SQLException, AuthorizeException {
org.dspace.authorize.ResourcePolicy dspacePolicy = org.dspace.authorize.ResourcePolicy.create(context);
dspacePolicy.setAction(policy.getActionInt());
dspacePolicy.setGroup(Group.find(context, policy.getGroupId()));
dspacePolicy.setResourceID(dspaceBitstream.getID());
dspacePolicy.setResource(dspaceBitstream);
dspacePolicy.setResourceType(org.dspace.core.Constants.BITSTREAM);
dspacePolicy.setStartDate(policy.getStartDate());
dspacePolicy.setEndDate(policy.getEndDate());
dspacePolicy.setRpDescription(policy.getRpDescription());
dspacePolicy.setRpName(policy.getRpName());
dspacePolicy.update();
dspaceBitstream.updateLastModified();
}
/**
* Remove policy from bitstream. But only if resourceID of policy is same as bitstream id.
* @param context Context to delete policy.
* @param policyID Id of resource policy, which will be deleted.
* @param bitstreamID Id of bitstream.
* @throws SQLException
*/
private void removePolicyFromBitstream(org.dspace.core.Context context, int policyID, int bitstreamID) throws SQLException {
DatabaseManager.updateQuery(context, "DELETE FROM resourcepolicy WHERE POLICY_ID = ? AND RESOURCE_ID = ?", policyID,bitstreamID);
}
/** /**
* Find bitstream from DSpace database. This encapsulatets the * Find bitstream from DSpace database. This encapsulatets the
* org.dspace.content.Bitstream.find method with a check whether the item exists and * org.dspace.content.Bitstream.find method with a check whether the item exists and

View File

@@ -543,11 +543,9 @@ public class ItemsResource extends Resource
date.setSeconds(0); date.setSeconds(0);
dspacePolicy.setStartDate(date); dspacePolicy.setStartDate(date);
} }
dspacePolicy.update();
bitstreamsPolicies.add(dspacePolicy);
dspaceBundle.replaceAllBitstreamPolicies(bitstreamsPolicies); dspacePolicy.update();
dspaceBundle.update(); dspaceBitstream.updateLastModified();
} }
} }
@@ -916,22 +914,22 @@ public class ItemsResource extends Resource
* Find items by one metadada field. * Find items by one metadada field.
* *
* @param metadataEntry * @param metadataEntry
* Metadata field by which will be searched. * Metadata field to search by.
* @param scheme * @param scheme
* Scheme of metadata(key). * Scheme of metadata(key).
* @param value * @param value
* Value of metadata field. * Value of metadata field.
* @param headers * @param headers
* If you want to access to item under logged user into context. * If you want to access the item as the user logged into context,
* In headers must be set header "rest-dspace-token" with passed * header "rest-dspace-token" must be set to token value retrieved
* token from login method. * from the login method.
* @return Return array of founded items. * @return Return array of found items.
* @throws WebApplicationException * @throws WebApplicationException
* It can be thrown: SQLException, when was problem with * Can be thrown: SQLException - problem with
* database reading. AuthorizeException. when was problem with * database reading. AuthorizeException - problem with
* authorization to item. IOException when was problem with * authorization to item. IOException - problem with
* reading from metadata field. ContextException, when was * reading from metadata field. ContextException -
* problem with creating context of DSpace. * problem with creating DSpace context.
*/ */
@POST @POST
@Path("/find-by-metadata-field") @Path("/find-by-metadata-field")
@@ -981,7 +979,7 @@ public class ItemsResource extends Resource
throw new WebApplicationException(Response.Status.NOT_FOUND); throw new WebApplicationException(Response.Status.NOT_FOUND);
} }
String sql = "SELECT ITEM_ID, TEXT_VALUE, TEXT_LANG, SHORT_ID, ELEMENT, QUALIFIER " + String sql = "SELECT RESOURCE_ID, TEXT_VALUE, TEXT_LANG, SHORT_ID, ELEMENT, QUALIFIER " +
"FROM METADATAVALUE " + "FROM METADATAVALUE " +
"JOIN METADATAFIELDREGISTRY ON METADATAVALUE.METADATA_FIELD_ID = METADATAFIELDREGISTRY.METADATA_FIELD_ID " + "JOIN METADATAFIELDREGISTRY ON METADATAVALUE.METADATA_FIELD_ID = METADATAFIELDREGISTRY.METADATA_FIELD_ID " +
"JOIN METADATASCHEMAREGISTRY ON METADATAFIELDREGISTRY.METADATA_SCHEMA_ID = METADATASCHEMAREGISTRY.METADATA_SCHEMA_ID " + "JOIN METADATASCHEMAREGISTRY ON METADATAFIELDREGISTRY.METADATA_SCHEMA_ID = METADATASCHEMAREGISTRY.METADATA_SCHEMA_ID " +
@@ -992,7 +990,14 @@ public class ItemsResource extends Resource
{ {
sql += "QUALIFIER='" + metadata[2] + "' AND "; sql += "QUALIFIER='" + metadata[2] + "' AND ";
} }
sql += "dbms_lob.substr(TEXT_VALUE, 40)='" + metadataEntry.getValue() + "' AND "; if (org.dspace.storage.rdbms.DatabaseManager.isOracle())
{
sql += "dbms_lob.compare(TEXT_VALUE, '" + metadataEntry.getValue() + "') = 0 AND ";
}
else
{
sql += "TEXT_VALUE='" + metadataEntry.getValue() + "' AND ";
}
if (metadataEntry.getLanguage() != null) if (metadataEntry.getLanguage() != null)
{ {
sql += "TEXT_LANG='" + metadataEntry.getLanguage() + "'"; sql += "TEXT_LANG='" + metadataEntry.getLanguage() + "'";
@@ -1006,7 +1011,7 @@ public class ItemsResource extends Resource
while (iterator.hasNext()) while (iterator.hasNext())
{ {
TableRow row = iterator.next(); TableRow row = iterator.next();
org.dspace.content.Item dspaceItem = this.findItem(context, row.getIntColumn("ITEM_ID"), org.dspace.content.Item dspaceItem = this.findItem(context, row.getIntColumn("RESOURCE_ID"),
org.dspace.core.Constants.READ); org.dspace.core.Constants.READ);
Item item = new Item(dspaceItem, "", context); Item item = new Item(dspaceItem, "", context);
writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers,
@@ -1019,7 +1024,7 @@ public class ItemsResource extends Resource
} }
catch (SQLException e) catch (SQLException e)
{ {
processException("Something get wrong while finding item. SQLException, Message: " + e, context); processException("Something went wrong while finding item. SQLException, Message: " + e, context);
} }
catch (ContextException e) catch (ContextException e)
{ {

View File

@@ -11,6 +11,7 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.WebApplicationException; import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.HttpHeaders;
@@ -37,6 +38,8 @@ import org.dspace.utils.DSpace;
public class Resource public class Resource
{ {
@javax.ws.rs.core.Context public static ServletContext servletContext;
private static Logger log = Logger.getLogger(Resource.class); private static Logger log = Logger.getLogger(Resource.class);
private static final boolean writeStatistics; private static final boolean writeStatistics;
@@ -45,6 +48,9 @@ public class Resource
writeStatistics = ConfigurationManager.getBooleanProperty("rest", "stats", false); writeStatistics = ConfigurationManager.getBooleanProperty("rest", "stats", false);
} }
static public String getServletContextPath() {
return servletContext.getContextPath();
}
/** /**
* Create context to work with DSpace database. It can create context * Create context to work with DSpace database. It can create context
* with or without a logged in user (parameter user is null). Throws * with or without a logged in user (parameter user is null). Throws

View File

@@ -76,7 +76,12 @@ public class Bitstream extends DSpaceObject {
if(expandFields.contains("parent") || expandFields.contains("all")) { if(expandFields.contains("parent") || expandFields.contains("all")) {
parentObject = new DSpaceObject(bitstream.getParentObject()); parentObject = new DSpaceObject(bitstream.getParentObject());
} else if(expandFields.contains("policies") || expandFields.contains("all")) { } else {
this.addExpand("parent");
}
if(expandFields.contains("policies") || expandFields.contains("all")) {
// Find policies without context.
List<ResourcePolicy> tempPolicies = new ArrayList<ResourcePolicy>(); List<ResourcePolicy> tempPolicies = new ArrayList<ResourcePolicy>();
Bundle[] bundles = bitstream.getBundles(); Bundle[] bundles = bitstream.getBundles();
for (Bundle bundle : bundles) { for (Bundle bundle : bundles) {
@@ -90,7 +95,6 @@ public class Bitstream extends DSpaceObject {
policies = tempPolicies.toArray(new ResourcePolicy[0]); policies = tempPolicies.toArray(new ResourcePolicy[0]);
} else { } else {
this.addExpand("parent");
this.addExpand("policies"); this.addExpand("policies");
} }

View File

@@ -8,6 +8,7 @@
package org.dspace.rest.common; package org.dspace.rest.common;
import org.atteo.evo.inflector.English; import org.atteo.evo.inflector.English;
import org.dspace.rest.Resource;
import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlRootElement;
@@ -72,8 +73,7 @@ public class DSpaceObject {
} }
public String getLink() { public String getLink() {
//TODO, get actual contextPath of /rest/ return Resource.getServletContextPath() + "/" + English.plural(getType()) + "/" + getId();
return "/RESTapi/" + English.plural(getType()) + "/" + getId();
} }
public String getType() { public String getType() {

View File

@@ -11,6 +11,9 @@ import java.util.Date;
import org.codehaus.jackson.annotate.JsonIgnore; import org.codehaus.jackson.annotate.JsonIgnore;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement(name = "resourcepolicy")
public class ResourcePolicy{ public class ResourcePolicy{
public enum Action { public enum Action {

View File

@@ -9,7 +9,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
</parent> </parent>
<properties> <properties>

View File

@@ -20,7 +20,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -15,7 +15,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -13,7 +13,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -11,7 +11,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -456,3 +456,11 @@ header {
color: $text-color; color: $text-color;
border: 1px solid $text-color; border: 1px solid $text-color;
} }
.didYouMean {
margin-bottom: $line-height-computed / 2;
a {
font-weight: bold;
}
}

View File

@@ -326,7 +326,7 @@
<xsl:template name="itemSummaryView-DIM-file-section"> <xsl:template name="itemSummaryView-DIM-file-section">
<xsl:choose> <xsl:choose>
<xsl:when test="//mets:fileSec/mets:fileGrp[@USE='CONTENT' or @USE='ORIGINAL' or @USE='LICENSE']/mets:file"> <xsl:when test="//mets:fileSec/mets:fileGrp[@USE='CONTENT' or @USE='ORIGINAL' or @USE='LICENSE']/mets:file">
<div class="item-page-field-wrapper table"> <div class="item-page-field-wrapper table word-break">
<h5> <h5>
<i18n:text>xmlui.dri2xhtml.METS-1.0.item-files-viewOpen</i18n:text> <i18n:text>xmlui.dri2xhtml.METS-1.0.item-files-viewOpen</i18n:text>
</h5> </h5>

View File

@@ -335,6 +335,15 @@
</p> </p>
</div> </div>
</div> </div>
<xsl:if test="dri:item[@id='aspect.discovery.SimpleSearch.item.did-you-mean']">
<div class="row">
<div class="col-sm-offset-3 col-sm-9">
<xsl:apply-templates select="dri:item[@id='aspect.discovery.SimpleSearch.item.did-you-mean']"/>
</div>
</div>
</xsl:if>
<div class="row"> <div class="row">
<div class="col-sm-offset-3 col-sm-9" id="filters-overview-wrapper-squared"/> <div class="col-sm-offset-3 col-sm-9" id="filters-overview-wrapper-squared"/>
</div> </div>
@@ -357,6 +366,9 @@
</p> </p>
</div> </div>
</div> </div>
<xsl:if test="dri:item[@id='aspect.discovery.SimpleSearch.item.did-you-mean']">
<xsl:apply-templates select="dri:item[@id='aspect.discovery.SimpleSearch.item.did-you-mean']"/>
</xsl:if>
<div id="filters-overview-wrapper-squared"/> <div id="filters-overview-wrapper-squared"/>
</xsl:template> </xsl:template>

View File

@@ -1212,7 +1212,7 @@
</div> </div>
</xsl:template> </xsl:template>
<xsl:template match="dri:*[count(dri:field) > 1 and dri:field[@type='button'] and count(dri:field[not(@type='button' or @type='')]) = 0]" priority="4"> <xsl:template match="dri:*[count(dri:field) > 1 and dri:field[@type='button'] and count(dri:field[not(@type='button' or @type='')]) = 0 and not(preceding-sibling::*[1][local-name()='label'])]" priority="4">
<div> <div>
<xsl:call-template name="standardAttributes"> <xsl:call-template name="standardAttributes">
</xsl:call-template> </xsl:call-template>
@@ -1321,9 +1321,9 @@
<xsl:text> btn-default</xsl:text> <xsl:text> btn-default</xsl:text>
</xsl:if> </xsl:if>
</xsl:when> </xsl:when>
<xsl:otherwise> <xsl:when test="not(@type='file')">
<xsl:text>form-control </xsl:text> <xsl:text>form-control </xsl:text>
</xsl:otherwise> </xsl:when>
</xsl:choose> </xsl:choose>
<xsl:if test="@rend"> <xsl:if test="@rend">

View File

@@ -696,7 +696,7 @@
<hr/> <hr/>
<div class="col-xs-7 col-sm-8"> <div class="col-xs-7 col-sm-8">
<div> <div>
<a href="http://www.dspace.org/" target="_blank">DSpace software</a> copyright&#160;&#169;&#160;2002-2013&#160; <a href="http://www.duraspace.org/" target="_blank">Duraspace</a> <a href="http://www.dspace.org/" target="_blank">DSpace software</a> copyright&#160;&#169;&#160;2002-2015&#160; <a href="http://www.duraspace.org/" target="_blank">DuraSpace</a>
</div> </div>
<div class="hidden-print"> <div class="hidden-print">
<a> <a>

View File

@@ -38,6 +38,7 @@
<item> <item>
<xsl:copy-of select="dri:item/dri:field[@id='aspect.discovery.SimpleSearch.field.scope']"/> <xsl:copy-of select="dri:item/dri:field[@id='aspect.discovery.SimpleSearch.field.scope']"/>
<xsl:copy-of select="dri:item/dri:field[@id='aspect.discovery.SimpleSearch.field.query']"/> <xsl:copy-of select="dri:item/dri:field[@id='aspect.discovery.SimpleSearch.field.query']"/>
<xsl:copy-of select="dri:item[@id='aspect.discovery.SimpleSearch.item.did-you-mean']"/>
<xsl:copy-of select="dri:item/dri:field[@id='aspect.discovery.SimpleSearch.field.submit']"/> <xsl:copy-of select="dri:item/dri:field[@id='aspect.discovery.SimpleSearch.field.submit']"/>
</item> </item>
</list> </list>

View File

@@ -11,7 +11,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>5.0</version> <version>5.3</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -482,7 +482,7 @@ public class ControlPanel extends AbstractDSpaceTransformer implements Serviceab
* @param value candidate string. * @param value candidate string.
* @return {@code value} or a constant indicating an unset value. * @return {@code value} or a constant indicating an unset value.
*/ */
private static String notnull(String value) { return null == value ? T_UNSET : value; } private static String notempty(String value) { return (null == value || "".equals(value)) ? T_UNSET : value; }
/** /**
* List important DSpace configuration parameters. * List important DSpace configuration parameters.
@@ -498,46 +498,46 @@ public class ControlPanel extends AbstractDSpaceTransformer implements Serviceab
dspace.addItem(Util.getSourceVersion()); dspace.addItem(Util.getSourceVersion());
dspace.addLabel(T_DSPACE_DIR); dspace.addLabel(T_DSPACE_DIR);
dspace.addItem(notnull(ConfigurationManager.getProperty("dspace.dir"))); dspace.addItem(notempty(ConfigurationManager.getProperty("dspace.dir")));
dspace.addLabel(T_DSPACE_URL); dspace.addLabel(T_DSPACE_URL);
dspace.addItem(notnull(ConfigurationManager.getProperty("dspace.url"))); dspace.addItem(notempty(ConfigurationManager.getProperty("dspace.url")));
dspace.addLabel(T_DSPACE_HOST_NAME); dspace.addLabel(T_DSPACE_HOST_NAME);
dspace.addItem(notnull(ConfigurationManager.getProperty("dspace.hostname"))); dspace.addItem(notempty(ConfigurationManager.getProperty("dspace.hostname")));
dspace.addLabel(T_DSPACE_NAME); dspace.addLabel(T_DSPACE_NAME);
dspace.addItem(notnull(ConfigurationManager.getProperty("dspace.name"))); dspace.addItem(notempty(ConfigurationManager.getProperty("dspace.name")));
dspace.addLabel(T_DB_NAME); dspace.addLabel(T_DB_NAME);
dspace.addItem(notnull(DatabaseManager.getDbName())); dspace.addItem(notempty(DatabaseManager.getDbName()));
dspace.addLabel(T_DB_URL); dspace.addLabel(T_DB_URL);
dspace.addItem(notnull(ConfigurationManager.getProperty("db.url"))); dspace.addItem(notempty(ConfigurationManager.getProperty("db.url")));
dspace.addLabel(T_DB_DRIVER); dspace.addLabel(T_DB_DRIVER);
dspace.addItem(notnull(ConfigurationManager.getProperty("db.driver"))); dspace.addItem(notempty(ConfigurationManager.getProperty("db.driver")));
dspace.addLabel(T_DB_MAX_CONN); dspace.addLabel(T_DB_MAX_CONN);
dspace.addItem(notnull(ConfigurationManager.getProperty("db.maxconnections"))); dspace.addItem(notempty(ConfigurationManager.getProperty("db.maxconnections")));
dspace.addLabel(T_DB_MAX_WAIT); dspace.addLabel(T_DB_MAX_WAIT);
dspace.addItem(notnull(ConfigurationManager.getProperty("db.maxwait"))); dspace.addItem(notempty(ConfigurationManager.getProperty("db.maxwait")));
dspace.addLabel(T_DB_MAX_IDLE); dspace.addLabel(T_DB_MAX_IDLE);
dspace.addItem(notnull(ConfigurationManager.getProperty("db.maxidle"))); dspace.addItem(notempty(ConfigurationManager.getProperty("db.maxidle")));
dspace.addLabel(T_MAIL_SERVER); dspace.addLabel(T_MAIL_SERVER);
dspace.addItem(notnull(ConfigurationManager.getProperty("mail.server"))); dspace.addItem(notempty(ConfigurationManager.getProperty("mail.server")));
dspace.addLabel(T_MAIL_FROM_ADDRESS); dspace.addLabel(T_MAIL_FROM_ADDRESS);
dspace.addItem(notnull(ConfigurationManager.getProperty("mail.from.address"))); dspace.addItem(notempty(ConfigurationManager.getProperty("mail.from.address")));
dspace.addLabel(T_FEEDBACK_RECIPIENT); dspace.addLabel(T_FEEDBACK_RECIPIENT);
dspace.addItem(notnull(ConfigurationManager.getProperty("feedback.recipient"))); dspace.addItem(notempty(ConfigurationManager.getProperty("feedback.recipient")));
dspace.addLabel(T_MAIL_ADMIN); dspace.addLabel(T_MAIL_ADMIN);
dspace.addItem(notnull(ConfigurationManager.getProperty("mail.admin"))); dspace.addItem(notempty(ConfigurationManager.getProperty("mail.admin")));
} }
/** /**

View File

@@ -11,7 +11,6 @@ import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.MessageFormat; import java.text.MessageFormat;
import org.apache.cocoon.caching.CacheableProcessingComponent; import org.apache.cocoon.caching.CacheableProcessingComponent;
import org.apache.cocoon.environment.ObjectModelHelper; import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Request; import org.apache.cocoon.environment.Request;
@@ -19,6 +18,8 @@ import org.apache.cocoon.util.HashUtil;
import org.apache.excalibur.source.SourceValidity; import org.apache.excalibur.source.SourceValidity;
import org.apache.excalibur.source.impl.validity.NOPValidity; import org.apache.excalibur.source.impl.validity.NOPValidity;
import org.dspace.app.requestitem.RequestItem; import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.RequestItemAuthor;
import org.dspace.app.requestitem.RequestItemAuthorExtractor;
import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer; import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer;
import org.dspace.app.xmlui.utils.ContextUtil; import org.dspace.app.xmlui.utils.ContextUtil;
import org.dspace.app.xmlui.utils.HandleUtil; import org.dspace.app.xmlui.utils.HandleUtil;
@@ -40,6 +41,7 @@ import org.dspace.core.Context;
import org.dspace.core.I18nUtil; import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager; import org.dspace.handle.HandleManager;
import org.dspace.utils.DSpace;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
/** /**
@@ -126,14 +128,18 @@ public class ItemRequestResponseFalseForm extends AbstractDSpaceTransformer impl
else else
title = "untitled"; title = "untitled";
EPerson submitter = item.getSubmitter(); RequestItemAuthor author = new DSpace()
.getServiceManager()
.getServiceByName(RequestItemAuthorExtractor.class.getName(),
RequestItemAuthorExtractor.class)
.getRequestItemAuthor(context, item);
Object[] args = new String[]{ Object[] args = new String[]{
requestItem.getReqName(), requestItem.getReqName(), // User
HandleManager.getCanonicalForm(item.getHandle()), // User HandleManager.getCanonicalForm(item.getHandle()), // URL
title, // request item title title, // request item title
submitter.getFullName(), // # submmiter name author.getFullName(),
submitter.getEmail() // # submmiter email author.getEmail()
}; };
String subject = I18nUtil.getMessage("itemRequest.response.subject.reject", context); String subject = I18nUtil.getMessage("itemRequest.response.subject.reject", context);

View File

@@ -11,7 +11,6 @@ import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.MessageFormat; import java.text.MessageFormat;
import org.apache.cocoon.caching.CacheableProcessingComponent; import org.apache.cocoon.caching.CacheableProcessingComponent;
import org.apache.cocoon.environment.ObjectModelHelper; import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Request; import org.apache.cocoon.environment.Request;
@@ -19,6 +18,8 @@ import org.apache.cocoon.util.HashUtil;
import org.apache.excalibur.source.SourceValidity; import org.apache.excalibur.source.SourceValidity;
import org.apache.excalibur.source.impl.validity.NOPValidity; import org.apache.excalibur.source.impl.validity.NOPValidity;
import org.dspace.app.requestitem.RequestItem; import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.RequestItemAuthor;
import org.dspace.app.requestitem.RequestItemAuthorExtractor;
import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer; import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer;
import org.dspace.app.xmlui.utils.ContextUtil; import org.dspace.app.xmlui.utils.ContextUtil;
import org.dspace.app.xmlui.utils.UIException; import org.dspace.app.xmlui.utils.UIException;
@@ -39,6 +40,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager; import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow; import org.dspace.storage.rdbms.TableRow;
import org.dspace.utils.DSpace;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
/** /**
@@ -125,14 +127,18 @@ public class ItemRequestResponseTrueForm extends AbstractDSpaceTransformer imple
else else
title = "untitled"; title = "untitled";
EPerson submitter = item.getSubmitter(); RequestItemAuthor author = new DSpace()
.getServiceManager()
.getServiceByName(RequestItemAuthorExtractor.class.getName(),
RequestItemAuthorExtractor.class)
.getRequestItemAuthor(context, item);
Object[] args = new String[]{ Object[] args = new String[]{
requestItem.getReqName(), requestItem.getReqName(), // User
HandleManager.getCanonicalForm(item.getHandle()), // User HandleManager.getCanonicalForm(item.getHandle()), // URL
title, // request item title title, // request item title
submitter.getFullName(), // # submmiter name author.getFullName(),
submitter.getEmail() // # submmiter email author.getEmail()
}; };
String subject = I18nUtil.getMessage("itemRequest.response.subject.approve", context); String subject = I18nUtil.getMessage("itemRequest.response.subject.approve", context);

View File

@@ -42,6 +42,11 @@ public abstract class AbstractRecentSubmissionTransformer extends AbstractDSpace
*/ */
protected DiscoverResult queryResults; protected DiscoverResult queryResults;
/**
* The maximum number of recent submissions read from configuration.
*/
protected int maxRecentSubmissions;
/** Cached validity object */ /** Cached validity object */
private SourceValidity validity; private SourceValidity validity;
@@ -132,7 +137,8 @@ public abstract class AbstractRecentSubmissionTransformer extends AbstractDSpace
DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration.getRecentSubmissionConfiguration(); DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration.getRecentSubmissionConfiguration();
if(recentSubmissionConfiguration != null){ if(recentSubmissionConfiguration != null){
queryArgs.setMaxResults(recentSubmissionConfiguration.getMax()); maxRecentSubmissions = recentSubmissionConfiguration.getMax();
queryArgs.setMaxResults(maxRecentSubmissions);
String sortField = SearchUtils.getSearchService().toSortFieldIndex(recentSubmissionConfiguration.getMetadataSortField(), recentSubmissionConfiguration.getType()); String sortField = SearchUtils.getSearchService().toSortFieldIndex(recentSubmissionConfiguration.getMetadataSortField(), recentSubmissionConfiguration.getType());
if(sortField != null){ if(sortField != null){
queryArgs.setSortField( queryArgs.setSortField(
@@ -171,6 +177,7 @@ public abstract class AbstractRecentSubmissionTransformer extends AbstractDSpace
public void recycle() { public void recycle() {
queryResults = null; queryResults = null;
validity = null; validity = null;
maxRecentSubmissions = 0;
super.recycle(); super.recycle();
} }

Some files were not shown because too many files have changed in this diff Show More