Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 01:54:22 +00:00)

Compare commits (134 commits): test_merge ... 61f2695b83
Author | SHA1 | Date
(avatar, author, and date columns were not captured in this extraction; the 134 commit SHAs, newest first, are:)
61f2695b83, 8643888d68, d8fbe16ede, 29e13b77fc, fc74a7ffdf, 6ac823d29a,
0246588ab5, 190b908a8a, f8ac04b591, 23cd582801, c100eb2567, de50420624,
988a67d8fe, 7a95781068, cad7e9397b, f84ddb541d, 45fa8277d8, d4c826d871,
ee5093b6a6, e672be5eaa, fcde7489d3, 4402847f38, a180a257cd, 467e42ca27,
13c6835656, f700c37a73, 0902db21eb, 11ba34b1fd, 936cb3c101, 7c7bbeadc5,
5e3c57a920, beb5a096ab, 73a0316f99, 257dc13d32, dfa38c90df, cd62cac431,
847904cadb, 127770219b, 98e89caa35, a063a7a4f6, 7dd2b5318b, 29b3ba9c57,
568fdab395, fe4077acee, cbecc64a5a, 5ed0b28a36, 54a6011e60, be9d2bd2bf,
e4d058f1c9, 603f638174, 66fc49800d, 376f49e99b, 4dedd1b66f, 8f4a3f963a,
cc35141661, fd1aa013a7, 3721d231b3, 45b677c20e, ccf253ddda, 2c34dde3a8,
13982618df, cf3f8e9bb2, 9bef7b58d6, b78f570dbf, 103d7f754a, ff0ebea316,
f831acf4d7, 21343d3158, abc238cdd0, 922109b10a, 23150bc89a, e51a2537ac,
94a1a0b2e0, 71af1503ae, d063f1ef66, be3702f419, aa0fe083a6, 38dd432feb,
bce865f299, 558f0c7bea, eec374a7c5, 375c0aee1c, 56b38bb26d, e4f9e90394,
05af860553, ea6e025519, b8cbaa053d, af654e57f2, 3bca2164be, 389269e958,
2d353ea995, e0dd7454a4, 7153efe412, 0a3733db95, ee45f736cd, 808135f688,
1ecb5660f8, 6282cb9f12, 2b2d0b19b6, 77f07a735b, 613ef1dc62, 5e3463bd8b,
e3c64906de, 8c79201da3, c22583e892, af38697ade, ccdbc381c5, 584d70db81,
ceab9526c4, 45c37bcf66, 69a83f0bdd, 18d53743cd, f6f2d5e05c, dc65c7a304,
de220d55ac, ac81a3d36f, 92120211da, 609062befb, ffd60bf811, 695aa673d3,
2f66457abe, c061015ecf, 6e8b6cc33a, 479cb76885, 588c4ef4d2, 5f291400b3,
1bd4da3899, 15b3f314a3, 28686fc48a, d2c120bd8b, 39fb2afba1, 5cd3daa379,
9424ccf4aa, a381611ebd
.github/workflows/codescan.yml (vendored, 6 changes)
@@ -47,7 +47,7 @@ jobs:
       # Initializes the CodeQL tools for scanning.
       # https://github.com/github/codeql-action
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           # Codescan Javascript as well since a few JS files exist in REST API's interface
           languages: java, javascript
@@ -56,8 +56,8 @@ jobs:
       # NOTE: Based on testing, this autobuild process works well for DSpace. A custom
       # DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v2
+        uses: github/codeql-action/autobuild@v3

       # Perform GitHub Code Scanning.
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
.github/workflows/docker.yml (vendored, 13 changes)
@@ -219,6 +219,19 @@
           result=$(wget -O- -q http://127.0.0.1:8080/server/api/core/collections)
           echo "$result"
           echo "$result" | grep -oE "\"Dog in Yard\","
+      # Verify basic backend logging is working.
+      # 1. Access the top communities list. Verify that the "Before request" INFO statement is logged
+      # 2. Access an invalid endpoint (and ignore 404 response). Verify that a "status:404" WARN statement is logged
+      - name: Verify backend is logging properly
+        run: |
+          wget -O/dev/null -q http://127.0.0.1:8080/server/api/core/communities/search/top
+          logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
+          echo "$logs"
+          echo "$logs" | grep -o "Before request \[GET /server/api/core/communities/search/top\]"
+          wget -O/dev/null -q http://127.0.0.1:8080/server/api/does/not/exist || true
+          logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
+          echo "$logs"
+          echo "$logs" | grep -o "status:404 exception: The repository type does.not was not found"
       # Verify Handle Server can be started and is working properly
       # 1. First generate the "[dspace]/handle-server" folder with the sitebndl.zip
       # 2. Start the Handle Server (and wait 20 seconds to let it start up)
.github/workflows/reusable-docker-build.yml (vendored, 4 changes)
@@ -165,7 +165,7 @@ jobs:
           # Use GitHub cache to load cached Docker images and cache the results of this build
           # This decreases the number of images we need to fetch from DockerHub
           cache-from: type=gha,scope=${{ inputs.build_id }}
-          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
+          cache-to: type=gha,scope=${{ inputs.build_id }},mode=min

       # Export the digest of Docker build locally
       - name: Export Docker build digest
@@ -217,7 +217,7 @@ jobs:
           # Use GitHub cache to load cached Docker images and cache the results of this build
           # This decreases the number of images we need to fetch from DockerHub
           cache-from: type=gha,scope=${{ inputs.build_id }}
-          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
+          cache-to: type=gha,scope=${{ inputs.build_id }},mode=min
           # Export image to a local TAR file
           outputs: type=docker,dest=/tmp/${{ inputs.build_id }}.tar
@@ -233,9 +233,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
     * Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util)
     * Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1)
     * Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix)
-    * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.25.0 - https://logging.apache.org/log4j/2.x/)
-    * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.25.0 - https://logging.apache.org/log4j/2.x/)
-    * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.25.0 - https://logging.apache.org/log4j/2.x/)
+    * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.25.1 - https://logging.apache.org/log4j/2.x/)
+    * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.25.1 - https://logging.apache.org/log4j/2.x/)
+    * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.25.1 - https://logging.apache.org/log4j/2.x/)
     * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-jul/)
     * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
     * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.2 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
@@ -12,7 +12,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>
@@ -99,20 +99,6 @@
             </executions>
         </plugin>

-        <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>build-helper-maven-plugin</artifactId>
-            <version>3.6.1</version>
-            <executions>
-                <execution>
-                    <phase>validate</phase>
-                    <goals>
-                        <goal>maven-version</goal>
-                    </goals>
-                </execution>
-            </executions>
-        </plugin>
-
         <plugin>
             <groupId>org.codehaus.mojo</groupId>
             <artifactId>buildnumber-maven-plugin</artifactId>
@@ -752,7 +738,7 @@
         <dependency>
             <groupId>com.amazonaws</groupId>
             <artifactId>aws-java-sdk-s3</artifactId>
-            <version>1.12.785</version>
+            <version>1.12.791</version>
         </dependency>

         <dependency>
@@ -801,7 +787,7 @@
         <dependency>
             <groupId>com.opencsv</groupId>
             <artifactId>opencsv</artifactId>
-            <version>5.11.1</version>
+            <version>5.12.0</version>
         </dependency>

         <!-- Email templating -->
@@ -884,32 +870,32 @@
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-buffer</artifactId>
-            <version>4.2.2.Final</version>
+            <version>4.2.6.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-transport</artifactId>
-            <version>4.2.2.Final</version>
+            <version>4.2.6.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-transport-native-unix-common</artifactId>
-            <version>4.2.2.Final</version>
+            <version>4.2.6.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-common</artifactId>
-            <version>4.2.2.Final</version>
+            <version>4.2.6.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-handler</artifactId>
-            <version>4.2.2.Final</version>
+            <version>4.2.6.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-codec</artifactId>
-            <version>4.2.2.Final</version>
+            <version>4.2.6.Final</version>
         </dependency>
         <dependency>
             <groupId>org.apache.velocity</groupId>
@@ -919,7 +905,7 @@
         <dependency>
             <groupId>org.xmlunit</groupId>
             <artifactId>xmlunit-core</artifactId>
-            <version>2.10.2</version>
+            <version>2.10.3</version>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -18,6 +18,7 @@ import java.io.InputStream;
 import java.sql.SQLException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.time.ZoneOffset;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.Iterator;
@@ -154,7 +155,7 @@ public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptCon
         }

         ObjectMapper mapper = new ObjectMapper();
-        mapper.setTimeZone(TimeZone.getTimeZone("UTC"));
+        mapper.setTimeZone(TimeZone.getTimeZone(ZoneOffset.UTC));
         BulkAccessControlInput accessControl;
         context = new Context(Context.Mode.BATCH_EDIT);
         setEPerson(context);
@@ -14,6 +14,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.UUID;

+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.DefaultParser.Builder;
 import org.apache.commons.cli.ParseException;
 import org.dspace.content.Item;
 import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
@@ -167,4 +169,14 @@ public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScr
         }
         return scopeObj;
     }
+
+    @Override
+    protected StepResult parse(String[] args) throws ParseException {
-        commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args);
+        Builder builder = new DefaultParser().builder();
+        builder.setStripLeadingAndTrailingQuotes(false);
+        commandLine = builder.build().parse(getScriptConfiguration().getOptions(), args);
+        setup();
+        return StepResult.Continue;
+    }
 }
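For readers unfamiliar with the commons-cli API used in the new parse() override, here is a minimal standalone sketch, assuming commons-cli 1.5+ (where DefaultParser.Builder and setStripLeadingAndTrailingQuotes exist); the option name is illustrative, not a DSpace option:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class QuotePreservingParse {
        public static void main(String[] args) throws ParseException {
            Options options = new Options();
            options.addOption("q", "query", true, "search query (illustrative option)");
            // Keep quotes so a value like "title:\"exact phrase\"" reaches the
            // search backend unmodified instead of having its quotes stripped.
            CommandLine cl = DefaultParser.builder()
                    .setStripLeadingAndTrailingQuotes(false)
                    .build()
                    .parse(options, new String[] {"-q", "\"dogs AND cats\""});
            System.out.println(cl.getOptionValue("q")); // quotes preserved
        }
    }

By default DefaultParser strips a single pair of leading/trailing quotes from option values, which would silently rewrite quoted Discovery queries; turning that off preserves the user's input verbatim.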
@@ -494,7 +494,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura

         // Check it has an owning collection
         List<String> collections = line.get("collection");
-        if (collections == null) {
+        if (collections == null || collections.isEmpty()) {
             throw new MetadataImportException(
                 "New items must have a 'collection' assigned in the form of a handle");
         }
@@ -363,12 +363,14 @@ public class ItemExportServiceImpl implements ItemExportService {
         File outFile = new File(destDir, "collections");
         if (outFile.createNewFile()) {
             try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
-                String ownerHandle = item.getOwningCollection().getHandle();
-                out.println(ownerHandle);
+                Collection owningCollection = item.getOwningCollection();
+                // The owning collection is null for workspace and workflow items
+                if (owningCollection != null) {
+                    out.println(owningCollection.getHandle());
+                }
                 for (Collection collection : item.getCollections()) {
-                    String collectionHandle = collection.getHandle();
-                    if (!collectionHandle.equals(ownerHandle)) {
-                        out.println(collectionHandle);
+                    if (!collection.equals(owningCollection)) {
+                        out.println(collection.getHandle());
                     }
                 }
             }
@@ -22,6 +22,7 @@ import java.util.UUID;

 import org.apache.commons.cli.ParseException;
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.tika.Tika;
 import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
@@ -333,6 +334,7 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
     protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
         Optional<InputStream> optionalFileStream = Optional.empty();
+        Optional<InputStream> validationFileStream = Optional.empty();
         try {
             if (!remoteUrl) {
                 // manage zip via upload
                 optionalFileStream = handler.getFileStream(context, zipfilename);
@@ -360,6 +362,10 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
             workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
                 + File.separator + context.getCurrentUser().getID());
             sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
+        } finally {
+            optionalFileStream.ifPresent(IOUtils::closeQuietly);
+            validationFileStream.ifPresent(IOUtils::closeQuietly);
         }
     }

     /**
@@ -17,6 +17,7 @@ import java.util.Optional;
 import java.util.UUID;

 import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.dspace.app.itemimport.service.ItemImportService;
 import org.dspace.content.Collection;
@@ -111,7 +112,11 @@ public class ItemImportCLI extends ItemImport {

                 // validate zip file
                 InputStream validationFileStream = new FileInputStream(myZipFile);
+                try {
                     validateZip(validationFileStream);
+                } finally {
+                    IOUtils.closeQuietly(validationFileStream);
+                }

                 workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
                     + File.separator + context.getCurrentUser().getID());
@@ -120,9 +125,11 @@ public class ItemImportCLI extends ItemImport {
             } else {
                 // manage zip via remote url
                 Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
+                Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
+                try {
                 if (optionalFileStream.isPresent()) {
                     // validate zip file via url
-                    Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());

                     if (validationFileStream.isPresent()) {
                         validateZip(validationFileStream.get());
                     }
@@ -137,6 +144,10 @@ public class ItemImportCLI extends ItemImport {
                     throw new IllegalArgumentException(
                         "Error reading file, the file couldn't be found for filename: " + zipfilename);
                 }
+                } finally {
+                    optionalFileStream.ifPresent(IOUtils::closeQuietly);
+                    validationFileStream.ifPresent(IOUtils::closeQuietly);
+                }
             }
         }
     }
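The ItemImport and ItemImportCLI changes above apply the same stream-cleanup pattern. A standalone sketch of that pattern, assuming commons-io on the classpath (the URL here is a placeholder, not a DSpace endpoint):

    import java.io.InputStream;
    import java.net.URL;
    import java.util.Optional;
    import org.apache.commons.io.IOUtils;

    public class StreamCleanupSketch {
        public static void main(String[] args) throws Exception {
            String zipUrl = "https://example.org/sample.zip"; // illustrative URL
            Optional<InputStream> download = Optional.empty();
            Optional<InputStream> validation = Optional.empty();
            try {
                // Open the resource twice: one stream to validate, one to consume.
                validation = Optional.ofNullable(new URL(zipUrl).openStream());
                download = Optional.ofNullable(new URL(zipUrl).openStream());
                // ... validate and unpack the zip here ...
            } finally {
                // closeQuietly swallows close() failures, so cleanup can never
                // mask an exception thrown inside the try block.
                validation.ifPresent(IOUtils::closeQuietly);
                download.ifPresent(IOUtils::closeQuietly);
            }
        }
    }

Declaring both Optionals before the try block is what lets the finally clause see them regardless of which open call failed, which is exactly the leak the diff closes.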
@@ -131,7 +131,7 @@ public final class CheckerCommand {
                 collector.collect(context, info);
             }

-            context.uncacheEntity(bitstream);
+            context.commit();
             bitstream = dispatcher.next();
         }
     }
@@ -56,8 +56,8 @@ public class MostRecentChecksumDAOImpl extends AbstractHibernateDAO<MostRecentCh
         criteriaQuery.where(criteriaBuilder.and(
             criteriaBuilder.equal(mostRecentChecksumRoot.get(MostRecentChecksum_.toBeProcessed), false),
-            criteriaBuilder
-                .lessThanOrEqualTo(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate),
-            criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate)
+            criteriaBuilder
+                .lessThanOrEqualTo(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate),
+            criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate)
             )
         );
         List<Order> orderList = new LinkedList<>();
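A hedged sketch of the corrected date-window predicate in plain JPA Criteria terms; the entity and field names below are generic placeholders, not the DSpace checksum schema:

    import java.util.Date;
    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.Id;
    import javax.persistence.criteria.CriteriaBuilder;
    import javax.persistence.criteria.CriteriaQuery;
    import javax.persistence.criteria.Root;

    public class DateWindowQuery {
        @Entity
        public static class AuditRow { // placeholder entity for the demo
            @Id Long id;
            Date processStartDate;
        }

        public static CriteriaQuery<AuditRow> build(EntityManager em, Date startDate, Date endDate) {
            CriteriaBuilder cb = em.getCriteriaBuilder();
            CriteriaQuery<AuditRow> q = cb.createQuery(AuditRow.class);
            Root<AuditRow> root = q.from(AuditRow.class);
            // The fixed predicate selects processStartDate in (startDate, endDate].
            // With the two bounds swapped, as before the fix, the conjunction can
            // never match when startDate precedes endDate.
            q.where(cb.and(
                    cb.greaterThan(root.<Date>get("processStartDate"), startDate),
                    cb.lessThanOrEqualTo(root.<Date>get("processStartDate"), endDate)));
            return q;
        }
    }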
@@ -34,35 +34,37 @@ import org.xml.sax.InputSource;
 * from {@code ${dspace.dir}/config/controlled-vocabularies/*.xml} and turns
 * them into autocompleting authorities.
 *
-* Configuration: This MUST be configured as a self-named plugin, e.g.: {@code
-* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
+* <p>Configuration: This MUST be configured as a self-named plugin, e.g.: {@code
+* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority =
 * org.dspace.content.authority.DSpaceControlledVocabulary
 * }
 *
-* It AUTOMATICALLY configures a plugin instance for each XML file in the
+* <p>It AUTOMATICALLY configures a plugin instance for each XML file in the
 * controlled vocabularies directory. The name of the plugin is the basename of
 * the file; e.g., {@code ${dspace.dir}/config/controlled-vocabularies/nsi.xml}
 * would generate a plugin called "nsi".
 *
-* Each configured plugin comes with three configuration options: {@code
-* vocabulary.plugin._plugin_.hierarchy.store = <true|false>
-* # Store entire hierarchy along with selected value. Default: TRUE
-* vocabulary.plugin._plugin_.hierarchy.suggest =
-* <true|false> # Display entire hierarchy in the suggestion list. Default: TRUE
-* vocabulary.plugin._plugin_.delimiter = "<string>"
-* # Delimiter to use when building hierarchy strings. Default: "::"
-* }
+* <p>Each configured plugin comes with three configuration options:
+* <ul>
+* <li>{@code vocabulary.plugin._plugin_.hierarchy.store = <true|false>}
+* # Store entire hierarchy along with selected value. Default: TRUE</li>
+* <li>{@code vocabulary.plugin._plugin_.hierarchy.suggest =
+* <true|false> # Display entire hierarchy in the suggestion list. Default: TRUE}</li>
+* <li>{@code vocabulary.plugin._plugin_.delimiter = "<string>"
+* # Delimiter to use when building hierarchy strings. Default: "::"}</li>
+* </ul>
 *
 * @author Michael B. Klein
 */

 public class DSpaceControlledVocabulary extends SelfNamedPlugin implements HierarchicalAuthority {

-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceControlledVocabulary.class);
+    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
     protected static String xpathTemplate = "//node[contains(translate(@label,'ABCDEFGHIJKLMNOPQRSTUVWXYZ'," +
-        "'abcdefghijklmnopqrstuvwxyz'),'%s')]";
-    protected static String idTemplate = "//node[@id = '%s']";
-    protected static String labelTemplate = "//node[@label = '%s']";
+        "'abcdefghijklmnopqrstuvwxyz'),%s)]";
+    protected static String idTemplate = "//node[@id = %s]";
+    protected static String idTemplateQuoted = "//node[@id = '%s']";
+    protected static String labelTemplate = "//node[@label = %s]";
     protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node";
     protected static String rootTemplate = "/node";
     protected static String pluginNames[] = null;
@@ -106,7 +108,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
             File.separator + "config" +
             File.separator + "controlled-vocabularies";
         String[] xmlFiles = (new File(vocabulariesPath)).list(new xmlFilter());
-        List<String> names = new ArrayList<String>();
+        List<String> names = new ArrayList<>();
         for (String filename : xmlFiles) {
             names.add((new File(filename)).getName().replace(".xml", ""));
         }
@@ -162,14 +164,23 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     public Choices getMatches(String text, int start, int limit, String locale) {
         init();
         log.debug("Getting matches for '" + text + "'");
-        String xpathExpression = "";
         String[] textHierarchy = text.split(hierarchyDelimiter, -1);
+        StringBuilder xpathExpressionBuilder = new StringBuilder();
         for (int i = 0; i < textHierarchy.length; i++) {
-            xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
+            xpathExpressionBuilder.append(String.format(xpathTemplate, "$var" + i));
         }
+        String xpathExpression = xpathExpressionBuilder.toString();
         XPath xpath = XPathFactory.newInstance().newXPath();
-        int total = 0;
-        List<Choice> choices = new ArrayList<Choice>();
+        xpath.setXPathVariableResolver(variableName -> {
+            String varName = variableName.getLocalPart();
+            if (varName.startsWith("var")) {
+                int index = Integer.parseInt(varName.substring(3));
+                return textHierarchy[index].toLowerCase();
+            }
+            throw new IllegalArgumentException("Unexpected variable: " + varName);
+        });
+        int total;
+        List<Choice> choices;
         try {
             NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
             total = results.getLength();
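A minimal, self-contained illustration of the variable-binding technique adopted above: user input is bound as an XPath variable instead of being concatenated into the expression, which neutralizes XPath injection. The document content and variable name below are invented for the demo:

    import java.io.ByteArrayInputStream;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.xpath.XPath;
    import javax.xml.xpath.XPathConstants;
    import javax.xml.xpath.XPathFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.NodeList;

    public class XPathVariableDemo {
        public static void main(String[] args) throws Exception {
            String xml = "<root><node label='dogs'/><node label='cats'/></root>";
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                    .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
            // Hostile input is harmless when bound as a variable: it is only
            // ever compared as a literal string, never parsed as XPath syntax.
            String userInput = "dogs' or '1'='1";
            XPath xpath = XPathFactory.newInstance().newXPath();
            xpath.setXPathVariableResolver(name ->
                    "var0".equals(name.getLocalPart()) ? userInput : null);
            NodeList hits = (NodeList) xpath.evaluate(
                    "//node[@label = $var0]", doc, XPathConstants.NODESET);
            System.out.println(hits.getLength()); // 0: no injection, just a literal mismatch
        }
    }

This is why the templates above drop their hard-coded quotes (%s instead of '%s'): the placeholder now receives a variable reference such as $var0 rather than raw text.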
@@ -185,14 +196,23 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     @Override
     public Choices getBestMatch(String text, String locale) {
         init();
-        log.debug("Getting best matches for '" + text + "'");
-        String xpathExpression = "";
+        log.debug("Getting best matches for {}'", text);
         String[] textHierarchy = text.split(hierarchyDelimiter, -1);
+        StringBuilder xpathExpressionBuilder = new StringBuilder();
         for (int i = 0; i < textHierarchy.length; i++) {
-            xpathExpression += String.format(labelTemplate, textHierarchy[i].replaceAll("'", "&apos;"));
+            xpathExpressionBuilder.append(String.format(labelTemplate, "$var" + i));
         }
+        String xpathExpression = xpathExpressionBuilder.toString();
         XPath xpath = XPathFactory.newInstance().newXPath();
-        List<Choice> choices = new ArrayList<Choice>();
+        xpath.setXPathVariableResolver(variableName -> {
+            String varName = variableName.getLocalPart();
+            if (varName.startsWith("var")) {
+                int index = Integer.parseInt(varName.substring(3));
+                return textHierarchy[index];
+            }
+            throw new IllegalArgumentException("Unexpected variable: " + varName);
+        });
+        List<Choice> choices;
         try {
             NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
             choices = getChoicesFromNodeList(results, 0, 1);
@@ -240,7 +260,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     @Override
     public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) {
         init();
-        String xpathExpression = String.format(idTemplate, parentId);
+        String xpathExpression = String.format(idTemplateQuoted, parentId);
         return getChoicesByXpath(xpathExpression, start, limit);
     }

@@ -264,15 +284,12 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     }

     private boolean isRootElement(Node node) {
-        if (node != null && node.getOwnerDocument().getDocumentElement().equals(node)) {
-            return true;
-        }
-        return false;
+        return node != null && node.getOwnerDocument().getDocumentElement().equals(node);
     }

     private Node getNode(String key) throws XPathExpressionException {
         init();
-        String xpathExpression = String.format(idTemplate, key);
+        String xpathExpression = String.format(idTemplateQuoted, key);
         Node node = getNodeFromXPath(xpathExpression);
         return node;
     }
@@ -284,7 +301,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     }

     private List<Choice> getChoicesFromNodeList(NodeList results, int start, int limit) {
-        List<Choice> choices = new ArrayList<Choice>();
+        List<Choice> choices = new ArrayList<>();
         for (int i = 0; i < results.getLength(); i++) {
             if (i < start) {
                 continue;
@@ -303,14 +320,14 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera

     private Map<String, String> addOtherInformation(String parentCurr, String noteCurr,
         List<String> childrenCurr, String authorityCurr) {
-        Map<String, String> extras = new HashMap<String, String>();
+        Map<String, String> extras = new HashMap<>();
         if (StringUtils.isNotBlank(parentCurr)) {
             extras.put("parent", parentCurr);
         }
         if (StringUtils.isNotBlank(noteCurr)) {
             extras.put("note", noteCurr);
         }
-        if (childrenCurr.size() > 0) {
+        if (!childrenCurr.isEmpty()) {
             extras.put("hasChildren", "true");
         } else {
             extras.put("hasChildren", "false");
@@ -368,7 +385,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     }

     private List<String> getChildren(Node node) {
-        List<String> children = new ArrayList<String>();
+        List<String> children = new ArrayList<>();
         NodeList childNodes = node.getChildNodes();
         for (int ci = 0; ci < childNodes.getLength(); ci++) {
             Node firstChild = childNodes.item(ci);
@@ -391,7 +408,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     private boolean isSelectable(Node node) {
         Node selectableAttr = node.getAttributes().getNamedItem("selectable");
         if (null != selectableAttr) {
-            return Boolean.valueOf(selectableAttr.getNodeValue());
+            return Boolean.parseBoolean(selectableAttr.getNodeValue());
         } else { // Default is true
             return true;
         }
@@ -418,7 +435,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     private Choices getChoicesByXpath(String xpathExpression, int start, int limit) {
-        List<Choice> choices = new ArrayList<Choice>();
+        List<Choice> choices = new ArrayList<>();
         XPath xpath = XPathFactory.newInstance().newXPath();
         try {
             Node parentNode = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE);
@@ -20,9 +20,7 @@ import org.apache.logging.log4j.Logger;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Bitstream;
 import org.dspace.content.BitstreamFormat;
-import org.dspace.content.Bundle;
 import org.dspace.content.DSpaceObject;
-import org.dspace.content.Item;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.BitstreamFormatService;
 import org.dspace.content.service.BitstreamService;
@@ -224,29 +222,17 @@ public class PREMISCrosswalk
             // c. made-up name based on sequence ID and extension.
             String sid = String.valueOf(bitstream.getSequenceID());
             String baseUrl = configurationService.getProperty("dspace.ui.url");
-            String handle = null;
-            // get handle of parent Item of this bitstream, if there is one:
-            List<Bundle> bn = bitstream.getBundles();
-            if (bn.size() > 0) {
-                List<Item> bi = bn.get(0).getItems();
-                if (bi.size() > 0) {
-                    handle = bi.get(0).getHandle();
-                }
-            }
             // get or make up name for bitstream:
             String bsName = bitstream.getName();
             if (bsName == null) {
                 List<String> ext = bitstream.getFormat(context).getExtensions();
                 bsName = "bitstream_" + sid + (ext.size() > 0 ? ext.get(0) : "");
             }
-            if (handle != null && baseUrl != null) {
+            if (baseUrl != null) {
                 oiv.setText(baseUrl
-                    + "/bitstream/"
-                    + URLEncoder.encode(handle, "UTF-8")
-                    + "/"
-                    + sid
-                    + "/"
-                    + URLEncoder.encode(bsName, "UTF-8"));
+                    + "/bitstreams/"
+                    + bitstream.getID()
+                    + "/download");
             } else {
                 oiv.setText(URLEncoder.encode(bsName, "UTF-8"));
             }
@@ -152,7 +152,7 @@ public class BitstreamDAOImpl extends AbstractHibernateDSODAO<Bitstream> impleme
     @Override
     public int countWithNoPolicy(Context context) throws SQLException {
         Query query = createQuery(context,
-            "SELECT count(bit.id) from Bitstream bit where bit.deleted<>true and bit.id not in" +
+            "SELECT count(bit.id) from Bitstream bit where bit.deleted<>true and bit not in" +
             " (select res.dSpaceObject from ResourcePolicy res where res.resourceTypeId = " +
             ":typeId )");
         query.setParameter("typeId", Constants.BITSTREAM);
@@ -12,6 +12,7 @@ import java.util.AbstractMap;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.UUID;
 import javax.persistence.Query;
 import javax.persistence.criteria.CriteriaBuilder;
 import javax.persistence.criteria.CriteriaQuery;
@@ -19,6 +20,7 @@ import javax.persistence.criteria.Join;
 import javax.persistence.criteria.Predicate;
 import javax.persistence.criteria.Root;

+import org.apache.logging.log4j.Logger;
 import org.dspace.authorize.ResourcePolicy;
 import org.dspace.authorize.ResourcePolicy_;
 import org.dspace.content.Collection;
@@ -40,6 +42,11 @@ import org.dspace.eperson.Group;
  * @author kevinvandevelde at atmire.com
  */
 public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> implements CollectionDAO {
+    /**
+     * log4j logger
+     */
+    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CollectionDAOImpl.class);

     protected CollectionDAOImpl() {
         super();
     }
@@ -159,7 +166,7 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple

     @Override
     public List<Collection> findCollectionsWithSubscribers(Context context) throws SQLException {
-        return list(createQuery(context, "SELECT DISTINCT c FROM Collection c JOIN Subscription s ON c.id = " +
+        return list(createQuery(context, "SELECT DISTINCT c FROM Collection c JOIN Subscription s ON c = " +
             "s.dSpaceObject"));
     }

@@ -172,14 +179,25 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
     @SuppressWarnings("unchecked")
     public List<Map.Entry<Collection, Long>> getCollectionsWithBitstreamSizesTotal(Context context)
         throws SQLException {
-        String q = "select col as collection, sum(bit.sizeBytes) as totalBytes from Item i join i.collections col " +
-            "join i.bundles bun join bun.bitstreams bit group by col";
+        String q = "select col.id, sum(bit.sizeBytes) as totalBytes from Item i join i.collections col " +
+            "join i.bundles bun join bun.bitstreams bit group by col.id";
         Query query = createQuery(context, q);

+        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);

         List<Object[]> list = query.getResultList();
         List<Map.Entry<Collection, Long>> returnList = new ArrayList<>(list.size());
         for (Object[] o : list) {
-            returnList.add(new AbstractMap.SimpleEntry<>((Collection) o[0], (Long) o[1]));
+            CriteriaQuery<Collection> criteriaQuery = criteriaBuilder.createQuery(Collection.class);
+            Root<Collection> collectionRoot = criteriaQuery.from(Collection.class);
+            criteriaQuery.select(collectionRoot).where(criteriaBuilder.equal(collectionRoot.get("id"), (UUID) o[0]));
+            Query collectionQuery = createQuery(context, criteriaQuery);
+            Collection collection = (Collection) collectionQuery.getSingleResult();
+            if (collection != null) {
+                returnList.add(new AbstractMap.SimpleEntry<>(collection, (Long) o[1]));
+            } else {
+                log.warn("Unable to find Collection with UUID: {}", o[0]);
+            }
         }
         return returnList;
     }
@@ -498,8 +498,11 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
                 // Finish creating the item. This actually assigns the handle,
                 // and will either install item immediately or start a workflow, based on params
                 PackageUtils.finishCreateItem(context, wsi, handle, params);
+            } else {
+                // We should have a workspace item during ingest, so this code is only here for safety.
+                // Update the object to make sure all changes are committed
+                PackageUtils.updateDSpaceObject(context, dso);
             }

         } else if (type == Constants.COLLECTION || type == Constants.COMMUNITY) {
             // Add logo if one is referenced from manifest
             addContainerLogo(context, dso, manifest, pkgFile, params);
@@ -513,6 +516,9 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
             // (this allows subclasses to do some final validation / changes as
             // necessary)
             finishObject(context, dso, params);
+
+            // Update the object to make sure all changes are committed
+            PackageUtils.updateDSpaceObject(context, dso);
         } else if (type == Constants.SITE) {
             // Do nothing by default -- Crosswalks will handle anything necessary to ingest at Site-level

@@ -520,18 +526,15 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
             // (this allows subclasses to do some final validation / changes as
             // necessary)
             finishObject(context, dso, params);
+
+            // Update the object to make sure all changes are committed
+            PackageUtils.updateDSpaceObject(context, dso);
         } else {
             throw new PackageValidationException(
                 "Unknown DSpace Object type in package, type="
                     + String.valueOf(type));
         }

         // -- Step 6 --
         // Finish things up!
-
-        // Update the object to make sure all changes are committed
-        PackageUtils.updateDSpaceObject(context, dso);

         return dso;
     }
@@ -458,7 +458,20 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
         for (Map.Entry<String, Object> entry : equals.entrySet()) {
             criteria.where(criteriaBuilder.equal(root.get(entry.getKey()), entry.getValue()));
         }

+        criteria.orderBy(criteriaBuilder.asc(root.get("id")));
+
         return executeCriteriaQuery(context, criteria, cacheable, maxResults, offset);
     }

+    /**
+     * Create a Query object from a CriteriaQuery
+     * @param context current Context
+     * @param criteriaQuery CriteriaQuery built via CriteriaBuilder
+     * @return corresponding Query
+     * @throws SQLException if error occurs
+     */
+    public Query createQuery(Context context, CriteriaQuery criteriaQuery) throws SQLException {
+        return this.getHibernateSession(context).createQuery(criteriaQuery);
+    }
 }
@@ -140,7 +140,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
         //Load enabled collections
         String[] citationEnabledCollections = configurationService
             .getArrayProperty("citation-page.enabled_collections");
-        citationEnabledCollectionsList = Arrays.asList(citationEnabledCollections);
+        citationEnabledCollectionsList = new ArrayList<String>(Arrays.asList(citationEnabledCollections));

         //Load enabled communities, and add to collection-list
         String[] citationEnabledCommunities = configurationService
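The distinction this fix relies on, demonstrated without any DSpace types: Arrays.asList returns a fixed-size view backed by the array, so the later "add to collection-list" step would throw.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class FixedSizeListDemo {
        public static void main(String[] args) {
            List<String> fixed = Arrays.asList("coll-1", "coll-2");
            try {
                fixed.add("coll-3"); // fixed-size view: add() is unsupported
            } catch (UnsupportedOperationException e) {
                System.out.println("fixed-size view rejects add()");
            }
            // Wrapping in ArrayList makes an independent, resizable copy.
            List<String> growable = new ArrayList<>(Arrays.asList("coll-1", "coll-2"));
            growable.add("coll-3");
            System.out.println(growable); // [coll-1, coll-2, coll-3]
        }
    }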
@@ -373,7 +373,7 @@ public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport {

     @Override
     public String getName() {
-        return getEmail();
+        return this.getFullName();
     }

     String getDigestAlgorithm() {
@@ -50,26 +50,26 @@ public class UserCheck extends Check {
         info.put("Self registered", 0);

         for (EPerson e : epersons) {
-            if (e.getEmail() != null && e.getEmail().length() > 0) {
+            if (e.getEmail() != null && !e.getEmail().isEmpty()) {
                 info.put("Have email", info.get("Have email") + 1);
             }
             if (e.canLogIn()) {
                 info.put("Can log in (password)",
                     info.get("Can log in (password)") + 1);
             }
-            if (e.getFirstName() != null && e.getFirstName().length() > 0) {
+            if (e.getFirstName() != null && !e.getFirstName().isEmpty()) {
                 info.put("Have 1st name", info.get("Have 1st name") + 1);
             }
-            if (e.getLastName() != null && e.getLastName().length() > 0) {
+            if (e.getLastName() != null && !e.getLastName().isEmpty()) {
                 info.put("Have 2nd name", info.get("Have 2nd name") + 1);
             }
-            if (e.getLanguage() != null && e.getLanguage().length() > 0) {
+            if (e.getLanguage() != null && !e.getLanguage().isEmpty()) {
                 info.put("Have lang", info.get("Have lang") + 1);
             }
-            if (e.getNetid() != null && e.getNetid().length() > 0) {
+            if (e.getNetid() != null && !e.getNetid().isEmpty()) {
                 info.put("Have netid", info.get("Have netid") + 1);
             }
-            if (e.getNetid() != null && e.getNetid().length() > 0) {
+            if (e.getNetid() != null && !e.getNetid().isEmpty()) {
                 info.put("Self registered", info.get("Self registered") + 1);
             }
         }
@@ -105,4 +105,13 @@ public class MetadatumDTO {
     public void setValue(String value) {
         this.value = value;
     }
+
+    /**
+     * Return string representation of MetadatumDTO
+     * @return string representation of format "[schema].[element].[qualifier]=[value]"
+     */
+    @Override
+    public String toString() {
+        return schema + "." + element + "." + qualifier + "=" + value;
+    }
 }
@@ -55,6 +55,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat

     private String urlFetch;
     private String urlSearch;
+    private String apiKey;

     private int attempt = 3;

@@ -210,6 +211,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
     @Override
     public Integer call() throws Exception {
         URIBuilder uriBuilder = new URIBuilder(urlSearch);
+        if (StringUtils.isNotBlank(apiKey)) {
+            uriBuilder.addParameter("api_key", apiKey);
+        }
         uriBuilder.addParameter("db", "pubmed");
         uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class));
         Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
@@ -286,6 +290,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
         List<ImportRecord> records = new LinkedList<ImportRecord>();

         URIBuilder uriBuilder = new URIBuilder(urlSearch);
+        if (StringUtils.isNotBlank(apiKey)) {
+            uriBuilder.addParameter("api_key", apiKey);
+        }
         uriBuilder.addParameter("db", "pubmed");
         uriBuilder.addParameter("retstart", start.toString());
         uriBuilder.addParameter("retmax", count.toString());
@@ -316,6 +323,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
         String webEnv = getSingleElementValue(response, "WebEnv");

         URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
+        if (StringUtils.isNotBlank(apiKey)) {
+            uriBuilder2.addParameter("api_key", apiKey);
+        }
         uriBuilder2.addParameter("db", "pubmed");
         uriBuilder2.addParameter("retstart", start.toString());
         uriBuilder2.addParameter("retmax", count.toString());
@@ -388,6 +398,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
     public ImportRecord call() throws Exception {

         URIBuilder uriBuilder = new URIBuilder(urlFetch);
+        if (StringUtils.isNotBlank(apiKey)) {
+            uriBuilder.addParameter("api_key", apiKey);
+        }
         uriBuilder.addParameter("db", "pubmed");
         uriBuilder.addParameter("retmode", "xml");
         uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class));
@@ -428,6 +441,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
     public Collection<ImportRecord> call() throws Exception {

         URIBuilder uriBuilder = new URIBuilder(urlSearch);
+        if (StringUtils.isNotBlank(apiKey)) {
+            uriBuilder.addParameter("api_key", apiKey);
+        }
         uriBuilder.addParameter("db", "pubmed");
         uriBuilder.addParameter("usehistory", "y");
         uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class));
@@ -457,6 +473,9 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
         String queryKey = getSingleElementValue(response, "QueryKey");

         URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
+        if (StringUtils.isNotBlank(apiKey)) {
+            uriBuilder.addParameter("api_key", apiKey);
+        }
         uriBuilder2.addParameter("db", "pubmed");
         uriBuilder2.addParameter("retmode", "xml");
         uriBuilder2.addParameter("WebEnv", webEnv);
@@ -532,4 +551,8 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
         this.urlSearch = urlSearch;
     }

+    public void setApiKey(String apiKey) {
+        this.apiKey = apiKey;
+    }

 }
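A standalone sketch of the conditional api_key handling above, assuming Apache HttpClient's URIBuilder (which the diff itself uses); the endpoint URL is NCBI's public E-utilities search endpoint:

    import java.net.URI;
    import org.apache.commons.lang3.StringUtils;
    import org.apache.http.client.utils.URIBuilder;

    public class PubmedUriSketch {
        public static URI buildSearchUri(String term, String apiKey) throws Exception {
            URIBuilder b = new URIBuilder("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi");
            // Only send api_key when one is configured: keyed clients get a
            // higher request rate, but an empty value should not be sent.
            if (StringUtils.isNotBlank(apiKey)) {
                b.addParameter("api_key", apiKey);
            }
            b.addParameter("db", "pubmed");
            b.addParameter("term", term);
            return b.build();
        }

        public static void main(String[] args) throws Exception {
            System.out.println(buildSearchUri("dspace", null)); // no api_key parameter
        }
    }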
@@ -39,6 +39,7 @@ public class OrcidWorkFieldMapping {
      * The metadata fields related to the work external identifiers.
      */
     private Map<String, String> externalIdentifierFields = new HashMap<>();
+    private Map<String, List<String>> externalIdentifierPartOfMap = new HashMap<>();

     /**
      * The metadata field related to the work publication date.
@@ -129,6 +130,15 @@ public class OrcidWorkFieldMapping {
         this.externalIdentifierFields = parseConfigurations(externalIdentifierFields);
     }

+    public Map<String, List<String>> getExternalIdentifierPartOfMap() {
+        return this.externalIdentifierPartOfMap;
+    }
+
+    public void setExternalIdentifierPartOfMap(
+        HashMap<String, List<String>> externalIdentifierPartOfMap) {
+        this.externalIdentifierPartOfMap = externalIdentifierPartOfMap;
+    }
+
     public String getPublicationDateField() {
         return publicationDateField;
     }
@@ -9,6 +9,7 @@ package org.dspace.orcid.model.factory.impl;

 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.orcid.jaxb.model.common.Relationship.PART_OF;
 import static org.orcid.jaxb.model.common.Relationship.SELF;

 import java.util.ArrayList;
@@ -73,12 +74,12 @@ public class OrcidWorkFactory implements OrcidEntityFactory {
     @Override
     public Activity createOrcidObject(Context context, Item item) {
         Work work = new Work();
+        work.setWorkType(getWorkType(context, item));
         work.setJournalTitle(getJournalTitle(context, item));
         work.setWorkContributors(getWorkContributors(context, item));
         work.setWorkTitle(getWorkTitle(context, item));
         work.setPublicationDate(getPublicationDate(context, item));
-        work.setWorkExternalIdentifiers(getWorkExternalIds(context, item));
-        work.setWorkType(getWorkType(context, item));
+        work.setWorkExternalIdentifiers(getWorkExternalIds(context, item, work));
         work.setShortDescription(getShortDescription(context, item));
         work.setLanguageCode(getLanguageCode(context, item));
         work.setUrl(getUrl(context, item));
@@ -149,63 +150,71 @@ public class OrcidWorkFactory implements OrcidEntityFactory {
     }

     /**
-     * Creates an instance of ExternalIDs from the metadata values of the given
-     * item, using the orcid.mapping.funding.external-ids configuration.
+     * Returns a list of external work IDs constructed in the org.orcid.jaxb
+     * ExternalIDs object
     */
-    private ExternalIDs getWorkExternalIds(Context context, Item item) {
-        ExternalIDs externalIdentifiers = new ExternalIDs();
-        externalIdentifiers.getExternalIdentifier().addAll(getWorkSelfExternalIds(context, item));
-        return externalIdentifiers;
+    private ExternalIDs getWorkExternalIds(Context context, Item item, Work work) {
+        ExternalIDs externalIDs = new ExternalIDs();
+        externalIDs.getExternalIdentifier().addAll(getWorkExternalIdList(context, item, work));
+        return externalIDs;
     }

     /**
      * Creates a list of ExternalID, one for orcid.mapping.funding.external-ids
-     * value, taking the values from the given item.
+     * value, taking the values from the given item and work type.
     */
-    private List<ExternalID> getWorkSelfExternalIds(Context context, Item item) {
+    private List<ExternalID> getWorkExternalIdList(Context context, Item item, Work work) {

-        List<ExternalID> selfExternalIds = new ArrayList<ExternalID>();
+        List<ExternalID> externalIds = new ArrayList<>();

         Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();

         if (externalIdentifierFields.containsKey(SIMPLE_HANDLE_PLACEHOLDER)) {
             String handleType = externalIdentifierFields.get(SIMPLE_HANDLE_PLACEHOLDER);
-            selfExternalIds.add(getExternalId(handleType, item.getHandle(), SELF));
+            ExternalID handle = new ExternalID();
+            handle.setType(handleType);
+            handle.setValue(item.getHandle());
+            handle.setRelationship(SELF);
+            externalIds.add(handle);
         }

+        // Resolve work type, used to determine identifier relationship type
+        // For version / funding relationships, we might want to use more complex
+        // business rules than just "work and id type"
+        final String workType = (work != null && work.getWorkType() != null) ?
+            work.getWorkType().value() : WorkType.OTHER.value();
         getMetadataValues(context, item, externalIdentifierFields.keySet()).stream()
-            .map(this::getSelfExternalId)
-            .forEach(selfExternalIds::add);
+            .map(metadataValue -> this.getExternalId(metadataValue, workType))
+            .forEach(externalIds::add);

-        return selfExternalIds;
-    }
-
-    /**
-     * Creates an instance of ExternalID taking the value from the given
-     * metadataValue. The type of the ExternalID is calculated using the
-     * orcid.mapping.funding.external-ids configuration. The relationship of the
-     * ExternalID is SELF.
-     */
-    private ExternalID getSelfExternalId(MetadataValue metadataValue) {
-        Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();
-        String metadataField = metadataValue.getMetadataField().toString('.');
-        return getExternalId(externalIdentifierFields.get(metadataField), metadataValue.getValue(), SELF);
+        return externalIds;
     }

     /**
      * Creates an instance of ExternalID with the given type, value and
      * relationship.
     */
-    private ExternalID getExternalId(String type, String value, Relationship relationship) {
+    private ExternalID getExternalId(MetadataValue metadataValue, String workType) {
+        Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();
+        Map<String, List<String>> externalIdentifierPartOfMap = fieldMapping.getExternalIdentifierPartOfMap();
+        String metadataField = metadataValue.getMetadataField().toString('.');
+        String identifierType = externalIdentifierFields.get(metadataField);
+        // Default relationship type is SELF, configuration can
+        // override to PART_OF based on identifier and work type
+        Relationship relationship = SELF;
+        if (externalIdentifierPartOfMap.containsKey(identifierType)
+            && externalIdentifierPartOfMap.get(identifierType).contains(workType)) {
+            relationship = PART_OF;
+        }
         ExternalID externalID = new ExternalID();
-        externalID.setType(type);
-        externalID.setValue(value);
+        externalID.setType(identifierType);
+        externalID.setValue(metadataValue.getValue());
         externalID.setRelationship(relationship);
         return externalID;
     }

     /**
-     * Creates an instance of WorkType from the given item, taking the value fom the
+     * Creates an instance of WorkType from the given item, taking the value from the
      * configured metadata field (orcid.mapping.work.type).
     */
     private WorkType getWorkType(Context context, Item item) {
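The new SELF vs PART_OF decision reduces to a small lookup rule. A detached sketch follows; the enum and configuration shape are illustrative, not the ORCID JAXB model:

    import java.util.List;
    import java.util.Map;

    public class RelationshipRule {
        enum Relationship { SELF, PART_OF } // stand-in for the ORCID enum

        // e.g. {"issn": ["journal-issue"]} read from configuration
        static Relationship resolve(Map<String, List<String>> partOfMap,
                                    String identifierType, String workType) {
            // Default is SELF; configuration flips to PART_OF when this
            // identifier type is declared "part of" for the given work type.
            if (partOfMap.containsKey(identifierType)
                    && partOfMap.get(identifierType).contains(workType)) {
                return Relationship.PART_OF;
            }
            return Relationship.SELF;
        }

        public static void main(String[] args) {
            Map<String, List<String>> cfg = Map.of("issn", List.of("journal-issue"));
            System.out.println(resolve(cfg, "issn", "journal-issue"));   // PART_OF
            System.out.println(resolve(cfg, "doi", "journal-article"));  // SELF
        }
    }

This is also why setWorkType moved to the top of createOrcidObject: the work type must be known before the external identifiers are built.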
@@ -283,6 +283,8 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
         itemService.addMetadata(context, item, "dc", "title", null, null, fullName);
         itemService.addMetadata(context, item, "person", "email", null, null, ePerson.getEmail());
         itemService.addMetadata(context, item, "dspace", "object", "owner", null, fullName, id, CF_ACCEPTED);
+        itemService.addMetadata(context, item, "person", "familyName", null, null, ePerson.getLastName());
+        itemService.addMetadata(context, item, "person", "givenName", null, null, ePerson.getFirstName());

         item = installItemService.installItem(context, workspaceItem);
@@ -117,7 +117,7 @@ public abstract class DSpaceRunnable<T extends ScriptConfiguration> implements R
      * @param args The primitive array of Strings representing the parameters
      * @throws ParseException If something goes wrong
     */
-    private StepResult parse(String[] args) throws ParseException {
+    protected StepResult parse(String[] args) throws ParseException {
         commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args);
         setup();
         return StepResult.Continue;
@@ -28,6 +28,7 @@ import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.EnumSet;
@@ -232,6 +233,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
             throw new RuntimeException(e);
         }

+        if (dspaceObject instanceof Bitstream && !isBitstreamLoggable((Bitstream) dspaceObject)) {
+            return;
+        }
+
         if (solr == null) {
             return;
         }
@@ -279,6 +284,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
     @Override
     public void postView(DSpaceObject dspaceObject,
                          String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer) {
+        if (dspaceObject instanceof Bitstream && !isBitstreamLoggable((Bitstream) dspaceObject)) {
+            return;
+        }
+
         if (solr == null) {
             return;
         }
@@ -1713,4 +1722,35 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea

         throw new UnknownHostException("unknown ip format");
     }

+    /**
+     * Checks if a given Bitstream's bundles are configured to be logged in Solr statistics.
+     *
+     * @param bitstream The bitstream to check.
+     * @return {@code true} if the bitstream event should be logged, {@code false} otherwise.
+     */
+    private boolean isBitstreamLoggable(Bitstream bitstream) {
+        String[] allowedBundles = configurationService
+            .getArrayProperty("solr-statistics.query.filter.bundles");
+        if (allowedBundles == null || allowedBundles.length == 0) {
+            return true;
+        }
+        List<String> allowedBundlesList = Arrays.asList(allowedBundles);
+        try {
+            List<Bundle> actualBundles = bitstream.getBundles();
+            if (actualBundles.isEmpty()) {
+                return true;
+            }
+            for (Bundle bundle : actualBundles) {
+                if (allowedBundlesList.contains(bundle.getName())) {
+                    return true;
+                }
+            }
+        } catch (SQLException e) {
+            log.error("Error checking bitstream bundles for logging statistics for bitstream {}",
+                bitstream.getID(), e);
+            return true;
+        }
+        return false;
+    }
 }
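For context, isBitstreamLoggable reads a comma-separated allow-list of bundle names from configuration. A hypothetical local.cfg entry (the property key comes from the diff above; the bundle name is illustrative) could be:

    solr-statistics.query.filter.bundles = ORIGINAL

With such a setting, only usage events on bitstreams in the ORIGINAL bundle are recorded; leaving the property unset preserves the previous log-everything behaviour, per the allowedBundles null/empty branch.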
@@ -136,9 +136,10 @@ public abstract class ExportEventProcessor {
             .append(URLEncoder.encode(clientUA, UTF_8));

-        String hostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
+        String oaiPrefix = configurationService.getProperty("oai.identifier.prefix");

         data.append("&").append(URLEncoder.encode("rft.artnum", UTF_8)).
-            append(URLEncoder.encode("oai:" + hostName + ":" + item
+            append(URLEncoder.encode("oai:" + oaiPrefix + ":" + item
                 .getHandle(), UTF_8));
         data.append("&").append(URLEncoder.encode("rfr_dat", UTF_8)).append("=")
             .append(URLEncoder.encode(referer, UTF_8));
@@ -423,7 +423,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
                 //modulo
                 if ((processedCounter % batchCommitSize) == 0) {
                     log.info("Migration Commit Checkpoint: " + processedCounter);
-                    context.dispatchEvents();
+                    context.commit();
                 }
             }
@@ -19,9 +19,11 @@ import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.core.Utils;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * Native DSpace (or "Directory Scatter" if you prefer) asset store.
@@ -252,7 +254,10 @@ public class DSBitStoreService extends BaseBitStoreService {
            }
            File bitstreamFile = new File(bufFilename.toString());
            Path normalizedPath = bitstreamFile.toPath().normalize();
-           if (!normalizedPath.startsWith(baseDir.getAbsolutePath())) {
+           String[] allowedAssetstoreRoots = DSpaceServicesFactory.getInstance().getConfigurationService()
+                   .getArrayProperty("assetstore.allowed.roots", new String[]{});
+           if (!normalizedPath.startsWith(baseDir.getAbsolutePath())
+                   && !StringUtils.startsWithAny(normalizedPath.toString(), allowedAssetstoreRoots)) {
                log.error("Bitstream path outside of assetstore root requested:" +
                        "bitstream={}, path={}, assetstore={}",
                        bitstream.getID(), normalizedPath, baseDir.getAbsolutePath());
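The hardened check above rejects normalized paths that escape the assetstore root unless they fall under an explicitly allowed root. A standalone sketch of the same normalize-then-prefix-check idea, with example paths (the real code additionally consults assetstore.allowed.roots):

    import java.nio.file.Path;

    public class PathCheckSketch {
        // normalize() collapses ".." segments before the prefix test,
        // so traversal attempts cannot sneak past startsWith().
        static boolean insideRoot(String candidate, Path root) {
            return Path.of(candidate).normalize().startsWith(root);
        }

        public static void main(String[] args) {
            Path root = Path.of("/dspace/assetstore");
            System.out.println(insideRoot("/dspace/assetstore/12/34/56/bitstream", root)); // true
            System.out.println(insideRoot("/dspace/assetstore/../../etc/passwd", root));   // false
        }
    }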
@@ -67,6 +67,7 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma
    public void perform(Context context, DSpaceRunnableHandler handler, String subscriptionType, String frequency) {
        List<IndexableObject> communityItems = new ArrayList<>();
        List<IndexableObject> collectionsItems = new ArrayList<>();
        EPerson currentEperson = context.getCurrentUser();
        try {
            List<Subscription> subscriptions =
                    findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency);
@@ -77,7 +78,10 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma
            for (Subscription subscription : subscriptions) {
                DSpaceObject dSpaceObject = subscription.getDSpaceObject();
                EPerson ePerson = subscription.getEPerson();

                // Set the current user to the subscribed eperson because the Solr query checks
                // the permissions of the current user in the ANONYMOUS group.
                // If there is no user (i.e., `current user = null`), it will send an email with no new items.
                context.setCurrentUser(ePerson);
                if (!authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, READ, true)) {
                    iterator++;
                    continue;
@@ -126,6 +130,8 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma
            handler.handleException(e);
            context.abort();
        }
        // Reset the current user because it was changed to the subscriber's EPerson above
        context.setCurrentUser(currentEperson);
    }

    @SuppressWarnings("rawtypes")
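The fix above swaps the context user per subscription and restores the original user once the loop finishes. A generic sketch of that save-and-restore pattern, using try/finally so the reset also survives a failure (the UserContext interface here is a hypothetical stand-in for org.dspace.core.Context; the real code resets after the loop instead):

    // Hypothetical minimal interface standing in for a context with a current user.
    interface UserContext<U> {
        U getCurrentUser();
        void setCurrentUser(U user);
    }

    final class RunAs {
        // Runs the task with `user` as the current user, then restores the original.
        static <U> void runAs(UserContext<U> ctx, U user, Runnable task) {
            U original = ctx.getCurrentUser();
            ctx.setCurrentUser(user);
            try {
                task.run();
            } finally {
                ctx.setCurrentUser(original); // restored even if the task throws
            }
        }
    }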
@@ -13,6 +13,7 @@ import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
@@ -107,7 +108,7 @@ public class DateMathParser {

    private static final Logger LOG = LogManager.getLogger();

-   public static final TimeZone UTC = TimeZone.getTimeZone("UTC");
+   public static final TimeZone UTC = TimeZone.getTimeZone(ZoneOffset.UTC);

    /**
     * Default TimeZone for DateMath rounding (UTC)
@@ -56,6 +56,7 @@
    <property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/>
    <property name="urlFetch" value="${pubmed.url.fetch}"/>
    <property name="urlSearch" value="${pubmed.url.search}"/>
    <property name="apiKey" value="${pubmed.apiKey}"/>
    <property name="generateQueryForItem" ref="pubmedService"></property>
    <property name="interRequestTime" value="500"/>
    <property name="supportedExtensions">
@@ -23,7 +23,6 @@
        <property name="minimumAcceptanceScore" value="3" />
    </bean>

    <bean id="autoassignactionAPI" class="org.dspace.xmlworkflow.state.actions.userassignment.AutoAssignAction" scope="prototype"/>
    <bean id="noUserSelectionActionAPI" class="org.dspace.xmlworkflow.state.actions.userassignment.NoUserSelectionAction" scope="prototype"/>
    <bean id="assignoriginalsubmitteractionAPI" class="org.dspace.xmlworkflow.state.actions.userassignment.AssignOriginalSubmitterAction" scope="prototype"/>
@@ -46,7 +45,6 @@
        <property name="requiresUI" value="true"/>
    </bean>

    <!--Action for the select single reviewer workflow -->
    <bean id="selectrevieweraction" class="org.dspace.xmlworkflow.state.actions.WorkflowActionConfig" scope="prototype">
        <constructor-arg type="java.lang.String" value="selectrevieweraction"/>
@@ -66,21 +64,14 @@
        <property name="requiresUI" value="true"/>
    </bean>

-   <bean id="ratingreviewaction" class="org.dspace.xmlworkflow.state.actions.WorkflowActionConfig" scope="prototype">
-       <constructor-arg type="java.lang.String" value="ratingreviewaction"/>
-       <property name="processingAction" ref="ratingreviewactionAPI" />
-       <property name="requiresUI" value="true"/>
-   </bean>
-
-   <!--Autmatic step that evaluates scores (workflow.score) and checks if they match the configured minimum for archiving -->
+   <!--Automatic step that evaluates scores (workflow.score) and checks if they match the configured minimum for archiving -->
    <bean id="evaluationaction" class="org.dspace.xmlworkflow.state.actions.WorkflowActionConfig" scope="prototype">
        <constructor-arg type="java.lang.String" value="evaluationaction"/>
        <property name="processingAction" ref="evaluationactionAPI" />
        <property name="requiresUI" value="false"/>
    </bean>

    <!--User selection actions-->
    <bean id="claimaction" class="org.dspace.xmlworkflow.state.actions.UserSelectionActionConfig" scope="prototype">
        <constructor-arg type="java.lang.String" value="claimaction"/>
        <property name="processingAction" ref="claimactionAPI"/>
@@ -12,6 +12,7 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.URL;
import java.sql.SQLException;
import java.time.ZoneOffset;
import java.util.Properties;
import java.util.TimeZone;

@@ -73,8 +74,10 @@ public class AbstractDSpaceIntegrationTest {
        // Stops System.exit(0): throws an exception instead of exiting
        System.setSecurityManager(new NoExitSecurityManager());

-       //set a standard time zone for the tests
-       TimeZone.setDefault(TimeZone.getTimeZone("Europe/Dublin"));
+       // All tests should assume UTC timezone by default (unless overridden in the test itself)
+       // This ensures that Spring doesn't attempt to change the timezone of dates that are read from the
+       // database (via Hibernate). We store all dates in the database as UTC.
+       TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC));

        //load the properties of the tests
        testProps = new Properties();
@@ -12,6 +12,7 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.URL;
import java.sql.SQLException;
import java.time.ZoneOffset;
import java.util.Properties;
import java.util.TimeZone;

@@ -82,8 +83,10 @@ public class AbstractDSpaceTest {
    @BeforeClass
    public static void initKernel() {
        try {
-           //set a standard time zone for the tests
-           TimeZone.setDefault(TimeZone.getTimeZone("Europe/Dublin"));
+           // All tests should assume UTC timezone by default (unless overridden in the test itself)
+           // This ensures that Spring doesn't attempt to change the timezone of dates that are read from the
+           // database (via Hibernate). We store all dates in the database as UTC.
+           TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC));

            //load the properties of the tests
            testProps = new Properties();
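Both test base classes now pin the JVM default time zone to UTC instead of Europe/Dublin. A quick illustration of why the default matters: the same Instant renders as different wall-clock dates depending on TimeZone.getDefault(), which previously made java.util.Date assertions DST-sensitive:

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.util.TimeZone;

    public class DefaultTzSketch {
        public static void main(String[] args) {
            Instant instant = Instant.parse("2024-06-30T23:30:00Z");
            TimeZone.setDefault(TimeZone.getTimeZone(ZoneId.of("Europe/Dublin")));
            // Date.toString() uses the default zone: Mon Jul 01 00:30:00 IST 2024
            System.out.println(new java.util.Date(instant.toEpochMilli()));
            TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC));
            // Same instant, now: Sun Jun 30 23:30:00 UTC 2024
            System.out.println(new java.util.Date(instant.toEpochMilli()));
        }
    }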
@@ -251,4 +251,35 @@ public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase
        assertNotNull(exception);
        assertEquals("nonExisting is not a valid search filter", exception.getMessage());
    }

    @Test
    public void exportMetadataSearchDoubleQuotedArgumentTest() throws Exception {
        context.turnOffAuthorisationSystem();
        Item quotedItem1 = ItemBuilder.createItem(context, collection)
                .withTitle("The Special Runnable Item")
                .withSubject("quoted-subject")
                .build();
        Item quotedItem2 = ItemBuilder.createItem(context, collection)
                .withTitle("The Special Item")
                .withSubject("quoted-subject")
                .build();
        context.restoreAuthSystemState();

        int result = runDSpaceScript(
                "metadata-export-search",
                "-q", "title:\"Special Runnable\"",
                "-n", filename);

        assertEquals(0, result);

        Item[] expectedResult = new Item[] {quotedItem1};
        checkItemsPresentInFile(filename, expectedResult);

        File file = new File(filename);
        try (Reader reader = Files.newReader(file, Charset.defaultCharset());
                CSVReader csvReader = new CSVReader(reader)) {
            List<String[]> lines = csvReader.readAll();
            assertEquals("Unexpected extra items in export", 2, lines.size());
        }
    }
}
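As a usage note, the quoted-query test above corresponds to a launcher invocation along these lines (the script name and the -q/-n flags are exactly those exercised by runDSpaceScript; whether the launcher is invoked as "bin/dspace" and the output path are assumptions):

    [dspace]/bin/dspace metadata-export-search -q 'title:"Special Runnable"' -n /tmp/export.csv

The exported CSV contains a header row plus one row per matching item, which is why the test expects exactly two lines when only quotedItem1 matches the phrase query.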
@@ -17,6 +17,7 @@ import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

import java.net.InetAddress;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
@@ -107,7 +108,14 @@ public class DSpaceHttpClientFactoryTest {

    @Test
    public void testBuildWithProxyConfiguredAndHostPrefixToIgnoreSet() throws Exception {
-       setHttpProxyOnConfigurationService("local*", "www.test.com");
+       // Get the hostname assigned to 127.0.0.1 (usually "localhost", but not always)
+       InetAddress address = InetAddress.getByAddress(new byte[]{127, 0, 0, 1});
+       String hostname = address.getHostName();
+       // Take the first 4 characters of the hostname as the prefix (e.g. "loca" in "localhost")
+       String hostnamePrefix = hostname.substring(0, 4);
+       // Save the hostname prefix to our list of hosts to ignore, followed by an asterisk.
+       // (This should result in our Proxy ignoring our localhost)
+       setHttpProxyOnConfigurationService(hostnamePrefix + "*", "www.test.com");
        CloseableHttpClient httpClient = httpClientFactory.build();
        assertThat(mockProxy.getRequestCount(), is(0));
        assertThat(mockServer.getRequestCount(), is(0));
@@ -122,7 +130,14 @@ public class DSpaceHttpClientFactoryTest {

    @Test
    public void testBuildWithProxyConfiguredAndHostSuffixToIgnoreSet() throws Exception {
-       setHttpProxyOnConfigurationService("www.test.com", "*host");
+       // Get the hostname assigned to 127.0.0.1 (usually "localhost", but not always)
+       InetAddress address = InetAddress.getByAddress(new byte[]{127, 0, 0, 1});
+       String hostname = address.getHostName();
+       // Take the last 4 characters of the hostname as the suffix (e.g. "host" in "localhost")
+       String hostnameSuffix = hostname.substring(hostname.length() - 4);
+       // Save the hostname suffix to our list of hosts to ignore, preceded by an asterisk.
+       // (This should result in our Proxy ignoring our localhost)
+       setHttpProxyOnConfigurationService("www.test.com", "*" + hostnameSuffix);
        CloseableHttpClient httpClient = httpClientFactory.build();
        assertThat(mockProxy.getRequestCount(), is(0));
        assertThat(mockServer.getRequestCount(), is(0));
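The two tests above derive the prefix and suffix patterns from the actual hostname of 127.0.0.1 rather than assuming it is "localhost". A tiny sketch of the wildcard style being exercised (assumed semantics, inferred from the test patterns: a trailing '*' matches any suffix and a leading '*' any prefix; the real matching lives inside DSpaceHttpClientFactory):

    public class ProxyIgnoreSketch {
        // '*' supported at one end only, mirroring the patterns used in the tests.
        static boolean ignored(String host, String pattern) {
            if (pattern.endsWith("*")) {
                return host.startsWith(pattern.substring(0, pattern.length() - 1));
            }
            if (pattern.startsWith("*")) {
                return host.endsWith(pattern.substring(1));
            }
            return host.equals(pattern);
        }

        public static void main(String[] args) {
            System.out.println(ignored("localhost", "loca*"));    // true
            System.out.println(ignored("localhost", "*host"));    // true
            System.out.println(ignored("www.test.com", "loca*")); // false
        }
    }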
@@ -112,6 +112,14 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
        return addMetadataValue(item, "dc", "identifier", "scopus", scopus);
    }

    public ItemBuilder withISSN(String issn) {
        return addMetadataValue(item, "dc", "identifier", "issn", issn);
    }

    public ItemBuilder withISBN(String isbn) {
        return addMetadataValue(item, "dc", "identifier", "isbn", isbn);
    }

    public ItemBuilder withRelationFunding(String funding) {
        return addMetadataValue(item, "dc", "relation", "funding", funding);
    }
@@ -0,0 +1,193 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.checker;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.sql.SQLException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.checker.factory.CheckerServiceFactory;
import org.dspace.checker.service.ChecksumHistoryService;
import org.dspace.checker.service.MostRecentChecksumService;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class ChecksumCheckerIT extends AbstractIntegrationTestWithDatabase {
    protected List<Bitstream> bitstreams;
    protected MostRecentChecksumService checksumService =
        CheckerServiceFactory.getInstance().getMostRecentChecksumService();

    @Before
    public void setup() throws Exception {
        context.turnOffAuthorisationSystem();

        Community parentCommunity = CommunityBuilder.createCommunity(context).build();
        Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
                .build();
        Item item = ItemBuilder.createItem(context, collection).withTitle("Test item")
                .build();

        int numBitstreams = 3;
        bitstreams = new ArrayList<>();
        for (int i = 0; i < numBitstreams; i++) {
            String content = "Test bitstream " + i;
            bitstreams.add(
                BitstreamBuilder.createBitstream(
                    context, item, IOUtils.toInputStream(content, UTF_8)
                ).build()
            );
        }

        context.restoreAuthSystemState();

        // Call the "updateMissingBitstreams" method so that the test bitstreams
        // already have checksums in the past when CheckerCommand runs.
        // Otherwise, the CheckerCommand will simply update the test
        // bitstreams without going through the BitstreamDispatcher.
        checksumService = CheckerServiceFactory.getInstance().getMostRecentChecksumService();
        checksumService.updateMissingBitstreams(context);

        // The "updateMissingBitstreams" method updates the test bitstreams in
        // a random order. To verify that the expected bitstreams were
        // processed, reset the timestamps so that the bitstreams are
        // checked in a specific order (oldest first).
        Instant checksumInstant = Instant.ofEpochMilli(0);
        for (Bitstream bitstream: bitstreams) {
            MostRecentChecksum mrc = checksumService.findByBitstream(context, bitstream);
            mrc.setProcessStartDate(Date.from(checksumInstant));
            mrc.setProcessEndDate(Date.from(checksumInstant));
            checksumInstant = checksumInstant.plusSeconds(10);
        }
        context.commit();
    }

    @After
    public void cleanUp() throws SQLException {
        // Need to clean up ChecksumHistory because of a referential integrity
        // constraint violation between the most_recent_checksum table and
        // bitstream tables
        ChecksumHistoryService checksumHistoryService = CheckerServiceFactory.getInstance().getChecksumHistoryService();

        for (Bitstream bitstream: bitstreams) {
            checksumHistoryService.deleteByBitstream(context, bitstream);
        }
    }

    @Test
    public void testChecksumsRecordedWhenProcessingIsInterrupted() throws SQLException {
        CheckerCommand checker = new CheckerCommand(context);

        // The start date to use for the checker process
        Date checkerStartDate = Date.from(Instant.now());

        // Verify that all checksums are before the checker start date
        for (Bitstream bitstream: bitstreams) {
            MostRecentChecksum checksum = checksumService.findByBitstream(context, bitstream);
            Date lastChecksumDate = checksum.getProcessStartDate();
            assertTrue("lastChecksumDate (" + lastChecksumDate + ") < checkerStartDate (" + checkerStartDate + ")",
                    lastChecksumDate.before(checkerStartDate));
        }

        // Dispatcher that throws an exception when a third bitstream is
        // retrieved.
        BitstreamDispatcher dispatcher = new ExceptionThrowingDispatcher(
            context, checkerStartDate, false, 2);
        checker.setDispatcher(dispatcher);

        // Run the checksum checker
        checker.setProcessStartDate(checkerStartDate);
        try {
            checker.process();
            fail("SQLException should have been thrown");
        } catch (SQLException sqle) {
            // Rollback any pending transaction
            context.rollback();
        }

        // Verify that the checksums of the first two bitstreams (that were
        // processed before the exception) have been successfully recorded in
        // the database, while the third bitstream was not updated.
        int bitstreamCount = 0;
        for (Bitstream bitstream: bitstreams) {
            MostRecentChecksum checksum = checksumService.findByBitstream(context, bitstream);
            Date lastChecksumDate = checksum.getProcessStartDate();

            bitstreamCount = bitstreamCount + 1;
            if (bitstreamCount <= 2) {
                assertTrue("lastChecksumDate (" + lastChecksumDate + ") > checkerStartDate (" + checkerStartDate + ")",
                        lastChecksumDate.after(checkerStartDate));
            } else {
                assertTrue("lastChecksumDate (" + lastChecksumDate + ") < checkerStartDate (" + checkerStartDate + ")",
                        lastChecksumDate.before(checkerStartDate));
            }
        }
    }

    /**
     * Subclass of SimpleDispatcher that only allows a limited number of "next"
     * calls before throwing a SQLException.
     */
    class ExceptionThrowingDispatcher extends SimpleDispatcher {
        // The number of "next" calls to allow before throwing a SQLException
        protected int maxNextCalls;

        // The number of "next" method calls seen so far.
        protected int numNextCalls = 0;

        /**
         * Constructor.
         *
         * @param context Context
         * @param startTime timestamp for beginning of checker process
         * @param looping indicates whether checker should loop infinitely
         *                through most_recent_checksum table
         * @param maxNextCalls the number of "next" method calls to allow before
         *                     throwing a SQLException.
         */
        public ExceptionThrowingDispatcher(Context context, Date startTime, boolean looping, int maxNextCalls) {
            super(context, startTime, looping);
            this.maxNextCalls = maxNextCalls;
        }

        /**
         * Selects the next candidate bitstream.
         *
         * After "maxNextCalls" calls, this method throws a
         * SQLException.
         *
         * @throws SQLException if database error
         */
        @Override
        public synchronized Bitstream next() throws SQLException {
            numNextCalls = numNextCalls + 1;
            if (numNextCalls > maxNextCalls) {
                throw new SQLException("Max 'next' method calls exceeded");
            }
            return super.next();
        }
    }
}
@@ -12,6 +12,7 @@ import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -25,6 +26,9 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;

@@ -148,6 +152,44 @@ public class BitstreamTest extends AbstractDSpaceObjectTest {
        assertTrue("testFindAll 2", added);
    }

    @Test
    public void testFindAllBatches() throws Exception {
        // Adding some data for processing and cleaning this up at the end
        context.turnOffAuthorisationSystem();
        File f = new File(testProps.get("test.bitstream").toString());
        List<Bitstream> inserted = new ArrayList<>();
        for (int i = 0; i < 5; i++) {
            Bitstream bs = bitstreamService.create(context, new FileInputStream(f));
            inserted.add(bs);
        }
        context.restoreAuthSystemState();

        // sorted list of all bitstreams
        List<Bitstream> all = bitstreamService.findAll(context);
        List<Bitstream> expected = new ArrayList<>(all);
        expected.sort(Comparator.comparing(bs -> bs.getID().toString()));

        int total = bitstreamService.countTotal(context);
        int batchSize = 2;
        int numberOfBatches = (int) Math.ceil((double) total / batchSize);

        // collect in batches
        List<Bitstream> collected = new ArrayList<>();
        for (int i = 0; i < numberOfBatches; i++) {
            Iterator<Bitstream> it = bitstreamService.findAll(context, batchSize, i * batchSize);
            it.forEachRemaining(collected::add);
        }

        assertEquals("Batched results should match sorted findAll", expected, collected);

        // Cleanup
        context.turnOffAuthorisationSystem();
        for (Bitstream b : inserted) {
            bitstreamService.delete(context, b);
        }
        context.restoreAuthSystemState();
    }

    /**
     * Test of create method, of class Bitstream.
     */
@@ -8,6 +8,7 @@
package org.dspace.content.authority;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;

import java.io.IOException;
@@ -86,6 +87,7 @@ public class DSpaceControlledVocabularyTest extends AbstractDSpaceTest {
        CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), "farm");
        assertNotNull(instance);
        Choices result = instance.getMatches(text, start, limit, locale);
        assertNotEquals("At least one match expected", 0, result.values.length);
        assertEquals("north 40", result.values[0].value);
    }

@@ -73,6 +73,9 @@ public class OrcidEntityFactoryServiceIT extends AbstractIntegrationTestWithData

    private Collection projects;

    private static final String isbn = "978-0-439-02348-1";
    private static final String issn = "1234-1234X";

    @Before
    public void setup() {

@@ -117,6 +120,7 @@ public class OrcidEntityFactoryServiceIT extends AbstractIntegrationTestWithData
            .withLanguage("en_US")
            .withType("Book")
            .withIsPartOf("Journal")
            .withISBN(isbn)
            .withDoiIdentifier("doi-id")
            .withScopusIdentifier("scopus-id")
            .build();
@@ -149,13 +153,102 @@ public class OrcidEntityFactoryServiceIT extends AbstractIntegrationTestWithData
        assertThat(work.getExternalIdentifiers(), notNullValue());

        List<ExternalID> externalIds = work.getExternalIdentifiers().getExternalIdentifier();
-       assertThat(externalIds, hasSize(3));
+       assertThat(externalIds, hasSize(4));
        assertThat(externalIds, has(selfExternalId("doi", "doi-id")));
        assertThat(externalIds, has(selfExternalId("eid", "scopus-id")));
        assertThat(externalIds, has(selfExternalId("handle", publication.getHandle())));
        // Book type should have SELF rel for ISBN
        assertThat(externalIds, has(selfExternalId("isbn", isbn)));

    }

    @Test
    public void testJournalArticleAndISSN() {
        context.turnOffAuthorisationSystem();

        Item publication = ItemBuilder.createItem(context, publications)
            .withTitle("Test publication")
            .withAuthor("Walter White")
            .withAuthor("Jesse Pinkman")
            .withEditor("Editor")
            .withIssueDate("2021-04-30")
            .withDescriptionAbstract("Publication description")
            .withLanguage("en_US")
            .withType("Article")
            .withIsPartOf("Journal")
            .withISSN(issn)
            .withDoiIdentifier("doi-id")
            .withScopusIdentifier("scopus-id")
            .build();

        context.restoreAuthSystemState();

        Activity activity = entityFactoryService.createOrcidObject(context, publication);
        assertThat(activity, instanceOf(Work.class));

        Work work = (Work) activity;
        assertThat(work.getJournalTitle(), notNullValue());
        assertThat(work.getJournalTitle().getContent(), is("Journal"));
        assertThat(work.getLanguageCode(), is("en"));
        assertThat(work.getPublicationDate(), matches(date("2021", "04", "30")));
        assertThat(work.getShortDescription(), is("Publication description"));
        assertThat(work.getPutCode(), nullValue());
        assertThat(work.getWorkType(), is(WorkType.JOURNAL_ARTICLE));
        assertThat(work.getWorkTitle(), notNullValue());
        assertThat(work.getWorkTitle().getTitle(), notNullValue());
        assertThat(work.getWorkTitle().getTitle().getContent(), is("Test publication"));
        assertThat(work.getWorkContributors(), notNullValue());
        assertThat(work.getUrl(), matches(urlEndsWith(publication.getHandle())));

        List<Contributor> contributors = work.getWorkContributors().getContributor();
        assertThat(contributors, hasSize(3));
        assertThat(contributors, has(contributor("Walter White", AUTHOR, FIRST)));
        assertThat(contributors, has(contributor("Editor", EDITOR, FIRST)));
        assertThat(contributors, has(contributor("Jesse Pinkman", AUTHOR, ADDITIONAL)));

        assertThat(work.getExternalIdentifiers(), notNullValue());

        List<ExternalID> externalIds = work.getExternalIdentifiers().getExternalIdentifier();
        assertThat(externalIds, hasSize(4));
        assertThat(externalIds, has(selfExternalId("doi", "doi-id")));
        assertThat(externalIds, has(selfExternalId("eid", "scopus-id")));
        assertThat(externalIds, has(selfExternalId("handle", publication.getHandle())));
        // journal-article should have PART_OF rel for ISSN
        assertThat(externalIds, has(externalId("issn", issn, Relationship.PART_OF)));
    }

    @Test
    public void testJournalWithISSN() {
        context.turnOffAuthorisationSystem();

        Item publication = ItemBuilder.createItem(context, publications)
            .withTitle("Test journal")
            .withEditor("Editor")
            .withType("Journal")
            .withISSN(issn)
            .build();

        context.restoreAuthSystemState();

        Activity activity = entityFactoryService.createOrcidObject(context, publication);
        assertThat(activity, instanceOf(Work.class));

        Work work = (Work) activity;
        assertThat(work.getWorkType(), is(WorkType.JOURNAL_ISSUE));
        assertThat(work.getWorkTitle(), notNullValue());
        assertThat(work.getWorkTitle().getTitle(), notNullValue());
        assertThat(work.getWorkTitle().getTitle().getContent(), is("Test journal"));
        assertThat(work.getUrl(), matches(urlEndsWith(publication.getHandle())));

        assertThat(work.getExternalIdentifiers(), notNullValue());

        List<ExternalID> externalIds = work.getExternalIdentifiers().getExternalIdentifier();
        assertThat(externalIds, hasSize(2));
        // journal-issue should have SELF rel for ISSN
        assertThat(externalIds, has(selfExternalId("issn", issn)));
        assertThat(externalIds, has(selfExternalId("handle", publication.getHandle())));
    }

    @Test
    public void testEmptyWorkWithUnknownTypeCreation() {

@@ -163,6 +256,7 @@ public class OrcidEntityFactoryServiceIT extends AbstractIntegrationTestWithData

        Item publication = ItemBuilder.createItem(context, publications)
            .withType("TYPE")
            .withISSN(issn)
            .build();

        context.restoreAuthSystemState();
@@ -183,8 +277,9 @@ public class OrcidEntityFactoryServiceIT extends AbstractIntegrationTestWithData
        assertThat(work.getExternalIdentifiers(), notNullValue());

        List<ExternalID> externalIds = work.getExternalIdentifiers().getExternalIdentifier();
-       assertThat(externalIds, hasSize(1));
+       assertThat(externalIds, hasSize(2));
        assertThat(externalIds, has(selfExternalId("handle", publication.getHandle())));
        assertThat(externalIds, has(externalId("issn", issn, Relationship.PART_OF)));
    }

    @Test
@@ -116,6 +116,7 @@ public class ITIrusExportUsageEventListener extends AbstractIntegrationTestWithD
        configurationService.setProperty("irus.statistics.tracker.enabled", true);
        configurationService.setProperty("irus.statistics.tracker.type-field", "dc.type");
        configurationService.setProperty("irus.statistics.tracker.type-value", "Excluded type");
        configurationService.setProperty("oai.identifier.prefix", "localhost");

        context.turnOffAuthorisationSystem();

@@ -62,6 +62,7 @@ public class ExportEventProcessorIT extends AbstractIntegrationTestWithDatabase
        configurationService.setProperty("irus.statistics.tracker.enabled", true);
        configurationService.setProperty("irus.statistics.tracker.type-field", "dc.type");
        configurationService.setProperty("irus.statistics.tracker.type-value", "Excluded type");
        configurationService.setProperty("oai.identifier.prefix", "localhost");

        context.turnOffAuthorisationSystem();
        publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build();
@@ -0,0 +1,262 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.storage.bitstore;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Context;
import org.dspace.storage.bitstore.factory.StorageServiceFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

public class BitstreamStorageServiceImplIT extends AbstractIntegrationTestWithDatabase {
    private BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
    private BitstreamStorageServiceImpl bitstreamStorageService =
        (BitstreamStorageServiceImpl) StorageServiceFactory.getInstance().getBitstreamStorageService();
    private Collection collection;

    private Map<Integer, BitStoreService> originalBitstores;

    private static final Integer SOURCE_STORE = 0;
    private static final Integer DEST_STORE = 1;

    @Rule
    public final TemporaryFolder tempStoreDir = new TemporaryFolder();

    @Before
    public void setup() throws Exception {

        context.turnOffAuthorisationSystem();

        parentCommunity = CommunityBuilder.createCommunity(context)
                .build();

        collection = CollectionBuilder.createCollection(context, parentCommunity)
                .build();

        originalBitstores = bitstreamStorageService.getStores();
        Map<Integer, BitStoreService> stores = new HashMap<>();
        DSBitStoreService sourceStore = new DSBitStoreService();
        sourceStore.setBaseDir(tempStoreDir.newFolder("src"));

        stores.put(SOURCE_STORE, sourceStore);
        bitstreamStorageService.setStores(stores);

        context.restoreAuthSystemState();
    }

    @After
    public void cleanUp() throws IOException {
        // Restore the original bitstream stores
        bitstreamStorageService.setStores(originalBitstores);
    }

    /**
     * Test batch commit checkpointing, using the default batch commit size of 1
     *
     * @throws Exception if an exception occurs.
     */
    @Test
    public void testDefaultBatchCommitSize() throws Exception {
        Context context = this.context;

        // Destination assetstore fails after two bitstreams have been migrated
        DSBitStoreService destinationStore = new LimitedTempDSBitStoreService(tempStoreDir, 2);
        Map<Integer, BitStoreService> stores = bitstreamStorageService.getStores();
        stores.put(DEST_STORE, destinationStore);

        // Create three bitstreams in the source assetstore
        createBitstreams(context, 3);

        // Three bitstreams in source assetstore at the start
        assertThat(bitstreamService.countByStoreNumber(context, SOURCE_STORE).intValue(), equalTo(3));

        // No bitstreams in destination assetstore at the start
        assertThat(bitstreamService.countByStoreNumber(context, DEST_STORE).intValue(), equalTo(0));

        // Commit any pending transaction to the database
        context.commit();

        // Migrate bitstreams
        context.turnOffAuthorisationSystem();

        boolean deleteOld = false;
        Integer batchCommitSize = 1;
        try {
            bitstreamStorageService.migrate(
                context, SOURCE_STORE, DEST_STORE, deleteOld,
                batchCommitSize
            );
            fail("IOException should have been thrown");
        } catch (IOException ioe) {
            // Rollback any pending transaction
            context.rollback();
        }

        context.restoreAuthSystemState();

        // One bitstream should still be in the source assetstore, due to the
        // interrupted migration
        assertThat(bitstreamService.countByStoreNumber(context, SOURCE_STORE).intValue(), equalTo(1));

        // Two bitstreams should have migrated to the destination assetstore
        assertThat(bitstreamService.countByStoreNumber(context, DEST_STORE).intValue(), equalTo(2));
    }

    /**
     * Test batch commit checkpointing, using a batch commit size of 3
     *
     * @throws Exception if an exception occurs.
     */
    @Test
    public void testBatchCommitSizeThree() throws Exception {
        Context context = this.context;

        // Destination assetstore fails after four bitstreams have been migrated
        DSBitStoreService destinationStore = new LimitedTempDSBitStoreService(tempStoreDir, 4);
        Map<Integer, BitStoreService> stores = bitstreamStorageService.getStores();
        stores.put(DEST_STORE, destinationStore);

        // Create five bitstreams in the source assetstore
        createBitstreams(context, 5);

        // Five bitstreams in source assetstore at the start
        assertThat(bitstreamService.countByStoreNumber(context, SOURCE_STORE).intValue(), equalTo(5));

        // No bitstreams in destination assetstore at the start
        assertThat(bitstreamService.countByStoreNumber(context, DEST_STORE).intValue(), equalTo(0));

        // Commit any pending transaction to the database
        context.commit();

        // Migrate bitstreams
        context.turnOffAuthorisationSystem();

        boolean deleteOld = false;
        Integer batchCommitSize = 3;
        try {
            bitstreamStorageService.migrate(
                context, SOURCE_STORE, DEST_STORE, deleteOld,
                batchCommitSize
            );
            fail("IOException should have been thrown");
        } catch (IOException ioe) {
            // Rollback any pending transaction
            context.rollback();
        }

        context.restoreAuthSystemState();

        // Since the batch commit size is 3, only three bitstreams should be
        // marked as migrated, so there should still be two bitstreams
        // in the source assetstore, due to the interrupted migration
        assertThat(bitstreamService.countByStoreNumber(context, SOURCE_STORE).intValue(), equalTo(2));

        // Three bitstreams should have migrated to the destination assetstore
        assertThat(bitstreamService.countByStoreNumber(context, DEST_STORE).intValue(), equalTo(3));
    }

    private void createBitstreams(Context context, int numBitstreams)
            throws SQLException {
        context.turnOffAuthorisationSystem();
        for (int i = 0; i < numBitstreams; i++) {
            String content = "Test bitstream " + i;
            createBitstream(content);
        }
        context.restoreAuthSystemState();
        context.commit();
    }

    private Bitstream createBitstream(String content) {
        try {
            return BitstreamBuilder
                .createBitstream(context, createItem(), toInputStream(content))
                .build();
        } catch (SQLException | AuthorizeException | IOException e) {
            throw new RuntimeException(e);
        }
    }

    private Item createItem() {
        return ItemBuilder.createItem(context, collection)
            .withTitle("Test item")
            .build();
    }

    private InputStream toInputStream(String content) {
        return IOUtils.toInputStream(content, UTF_8);
    }

    /**
     * DSBitStoreService variation that only allows a limited number of puts
     * to the bit store before throwing an IOException, to test the
     * error handling of the BitstreamStorageService.migrate() method.
     */
    class LimitedTempDSBitStoreService extends DSBitStoreService {
        // The number of put calls allowed before throwing an IOException
        protected int maxPuts = Integer.MAX_VALUE;

        // The number of "put" method calls seen so far.
        protected int putCallCount = 0;

        /**
         * Constructor.
         *
         * @param maxPuts the number of put calls to allow before throwing an
         *                IOException
         */
        public LimitedTempDSBitStoreService(TemporaryFolder tempStoreDir, int maxPuts) throws IOException {
            super();
            setBaseDir(tempStoreDir.newFolder());
            this.maxPuts = maxPuts;
        }

        /**
         * Store a stream of bits.
         *
         * After "maxPuts" calls, this method throws an IOException.
         * @param in The stream of bits to store
         * @throws java.io.IOException If a problem occurs while storing the bits
         */
        @Override
        public void put(Bitstream bitstream, InputStream in) throws IOException {
            putCallCount = putCallCount + 1;
            if (putCallCount > maxPuts) {
                throw new IOException("Max 'put' method calls exceeded");
            } else {
                super.put(bitstream, in);
            }
        }
    }
}
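Both tests above pivot on the checkpoint behaviour of migrate(): work committed every batchCommitSize items survives a later failure. The general pattern, sketched standalone with hypothetical process/commit callbacks (the real method also marks each bitstream's store number and dispatches events):

    import java.util.List;
    import java.util.function.Consumer;

    public class BatchCheckpointSketch {
        // Processes items, committing after every `batchSize` items, so an
        // exception at item k preserves the first (k / batchSize) * batchSize items.
        static <T> void processWithCheckpoints(List<T> items, int batchSize,
                                               Consumer<T> process, Runnable commit) {
            int processed = 0;
            for (T item : items) {
                process.accept(item);
                processed++;
                if (processed % batchSize == 0) {
                    commit.run(); // checkpoint: everything so far is durable
                }
            }
            commit.run(); // final commit for the tail of the last batch
        }
    }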
@@ -26,9 +26,11 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@@ -41,7 +43,6 @@ import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.ObjectMetadata;
import io.findify.s3mock.S3Mock;
-import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.BooleanUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
@@ -80,8 +81,6 @@ public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {

    private Collection collection;

-   private File s3Directory;
-
    private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();

@@ -89,9 +88,8 @@ public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {
    public void setup() throws Exception {

        configurationService.setProperty("assetstore.s3.enabled", "true");
-       s3Directory = new File(System.getProperty("java.io.tmpdir"), "s3");

-       s3Mock = S3Mock.create(8001, s3Directory.getAbsolutePath());
+       s3Mock = new S3Mock.Builder().withPort(8001).withInMemoryBackend().build();
        s3Mock.start();

        amazonS3Client = createAmazonS3Client();
@@ -112,8 +110,7 @@ public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {
    }

    @After
-   public void cleanUp() throws IOException {
-       FileUtils.deleteDirectory(s3Directory);
+   public void cleanUp() {
        s3Mock.shutdown();
    }

@@ -337,7 +334,7 @@ public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {
        String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
        int slashes = computeSlashes(path.toString());
        assertThat(computedPath, Matchers.endsWith(File.separator));
-       assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+       assertThat(countPathElements(computedPath), Matchers.equalTo(slashes));

        path.append("2");
        computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
@@ -362,31 +359,31 @@ public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {
        String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
        int slashes = computeSlashes(path.toString());
        assertThat(computedPath, Matchers.endsWith(File.separator));
-       assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+       assertThat(countPathElements(computedPath), Matchers.equalTo(slashes));

        path.append("2");
        computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
        slashes = computeSlashes(path.toString());
        assertThat(computedPath, Matchers.endsWith(File.separator));
-       assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+       assertThat(countPathElements(computedPath), Matchers.equalTo(slashes));

        path.append("3");
        computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
        slashes = computeSlashes(path.toString());
        assertThat(computedPath, Matchers.endsWith(File.separator));
-       assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+       assertThat(countPathElements(computedPath), Matchers.equalTo(slashes));

        path.append("4");
        computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
        slashes = computeSlashes(path.toString());
        assertThat(computedPath, Matchers.endsWith(File.separator));
-       assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+       assertThat(countPathElements(computedPath), Matchers.equalTo(slashes));

        path.append("56789");
        computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
        slashes = computeSlashes(path.toString());
        assertThat(computedPath, Matchers.endsWith(File.separator));
-       assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
+       assertThat(countPathElements(computedPath), Matchers.equalTo(slashes));
    }

    @Test
@@ -465,4 +462,12 @@ public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {
        return Math.min(slashes, S3BitStoreService.directoryLevels);
    }

    // Count the number of elements in a Unix or Windows path.
    // We use 'Paths' instead of splitting on slashes because these OSes use different path separators.
    private int countPathElements(String stringPath) {
        List<String> pathElements = new ArrayList<>();
        Paths.get(stringPath).forEach(p -> pathElements.add(p.toString()));
        return pathElements.size();
    }

}
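The assertion rewrite above matters on Windows: String.split takes a regular expression, and the Windows separator "\" is itself a regex escape character, while iterating a Path splits correctly on either platform. A sketch of the two approaches (illustrative path; on Windows, split(File.separator) would throw a PatternSyntaxException):

    import java.nio.file.Paths;
    import java.util.concurrent.atomic.AtomicInteger;

    public class PathCountSketch {
        public static void main(String[] args) {
            String path = "ab/cd/ef/";
            // Splitting on "/" happens to work on Unix...
            System.out.println(path.split("/").length); // 3
            // ...but counting Path elements is separator-safe on every OS:
            AtomicInteger count = new AtomicInteger();
            Paths.get(path).forEach(p -> count.incrementAndGet());
            System.out.println(count.get()); // 3
        }
    }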
@@ -15,7 +15,7 @@
    <parent>
        <groupId>org.dspace</groupId>
        <artifactId>dspace-parent</artifactId>
-       <version>7.6.4</version>
+       <version>7.6.6-SNAPSHOT</version>
        <relativePath>..</relativePath>
    </parent>

@@ -8,7 +8,7 @@
    <parent>
        <artifactId>dspace-parent</artifactId>
        <groupId>org.dspace</groupId>
-       <version>7.6.4</version>
+       <version>7.6.6-SNAPSHOT</version>
        <relativePath>..</relativePath>
    </parent>

@@ -16,6 +16,7 @@ import java.util.List;
import com.lyncode.xoai.dataprovider.xml.xoai.Element;
import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
import com.lyncode.xoai.util.Base64Utils;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.factory.UtilServiceFactory;
@@ -142,7 +143,7 @@ public class ItemUtils {
                bitstream.getField().add(createValue("name", name));
            }
            if (oname != null) {
-               bitstream.getField().add(createValue("originalName", name));
+               bitstream.getField().add(createValue("originalName", oname));
            }
            if (description != null) {
                bitstream.getField().add(createValue("description", description));
@@ -159,6 +160,19 @@ public class ItemUtils {
        return bundles;
    }

    /**
     * Sanitizes a string to remove characters that are invalid
     * in XML 1.0 using the Apache Commons Text library.
     * @param value The string to sanitize.
     * @return A sanitized string, or null if the input was null.
     */
    private static String sanitize(String value) {
        if (value == null) {
            return null;
        }
        return StringEscapeUtils.escapeXml10(value);
    }

    private static Element createLicenseElement(Context context, Item item)
            throws SQLException, AuthorizeException, IOException {
        Element license = create("license");
@@ -232,7 +246,7 @@ public class ItemUtils {
            valueElem = language;
        }

-       valueElem.getField().add(createValue("value", val.getValue()));
+       valueElem.getField().add(createValue("value", sanitize(val.getValue())));
        if (val.getAuthority() != null) {
            valueElem.getField().add(createValue("authority", val.getAuthority()));
            if (val.getConfidence() != Choices.CF_NOVALUE) {
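The sanitize() helper above leans on Commons Text, whose escapeXml10 both escapes markup characters and silently drops characters that are not legal in XML 1.0, such as most control characters. For illustration:

    import org.apache.commons.text.StringEscapeUtils;

    public class SanitizeSketch {
        public static void main(String[] args) {
            String dirty = "Tom & Jerry\u0000\u0008 <draft>";
            // Prints: Tom &amp; Jerry &lt;draft&gt;
            // (the NUL and backspace characters are removed entirely)
            System.out.println(StringEscapeUtils.escapeXml10(dirty));
        }
    }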
@@ -9,7 +9,7 @@
    <parent>
        <groupId>org.dspace</groupId>
        <artifactId>dspace-parent</artifactId>
-       <version>7.6.4</version>
+       <version>7.6.6-SNAPSHOT</version>
        <relativePath>..</relativePath>
    </parent>

@@ -3,7 +3,7 @@
    <groupId>org.dspace</groupId>
    <artifactId>dspace-rest</artifactId>
    <packaging>war</packaging>
-   <version>7.6.4</version>
+   <version>7.6.6-SNAPSHOT</version>
    <name>DSpace (Deprecated) REST Webapp</name>
    <description>DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED.
        Please consider using the REST API in the dspace-server-webapp instead!</description>
@@ -12,7 +12,7 @@
    <parent>
        <groupId>org.dspace</groupId>
        <artifactId>dspace-parent</artifactId>
-       <version>7.6.4</version>
+       <version>7.6.6-SNAPSHOT</version>
        <relativePath>..</relativePath>
    </parent>

@@ -15,7 +15,7 @@
    <parent>
        <groupId>org.dspace</groupId>
        <artifactId>dspace-parent</artifactId>
-       <version>7.6.4</version>
+       <version>7.6.6-SNAPSHOT</version>
        <relativePath>..</relativePath>
    </parent>

@@ -514,7 +514,7 @@
    <dependency>
        <groupId>net.minidev</groupId>
        <artifactId>json-smart</artifactId>
-       <version>2.5.2</version>
+       <version>2.6.0</version>
    </dependency>

    <dependency>
@@ -9,7 +9,10 @@ package org.dspace.app.rest;

import java.io.IOException;
import java.sql.SQLException;
import java.time.ZoneOffset;
import java.util.List;
import java.util.TimeZone;
import javax.annotation.PostConstruct;
import javax.servlet.Filter;

import org.dspace.app.rest.filter.DSpaceRequestContextFilter;
@@ -262,4 +265,12 @@ public class Application extends SpringBootServletInitializer {
            }
        };
    }

    @PostConstruct
    public void setDefaultTimezone() {
        // Set the default timezone in Spring Boot to UTC.
        // This ensures that Spring Boot doesn't attempt to change the timezone of dates that are read from the
        // database (via Hibernate). We store all dates in the database as UTC.
        TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC));
    }
}
@@ -87,6 +87,7 @@ public class OrcidLoginFilter extends StatelessLoginFilter {
            String baseRediredirectUrl = configurationService.getProperty("dspace.ui.url");
            String redirectUrl = baseRediredirectUrl + "/error?status=401&code=orcid.generic-error";
            response.sendRedirect(redirectUrl); // lgtm [java/unvalidated-url-redirection]
            this.closeOpenContext(request);
        } else {
            super.unsuccessfulAuthentication(request, response, failed);
        }
@@ -8,11 +8,14 @@
package org.dspace.app.rest.security;

import java.io.IOException;
import java.sql.SQLException;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.core.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.authentication.AuthenticationManager;
@@ -122,6 +125,27 @@ public class StatelessLoginFilter extends AbstractAuthenticationProcessingFilter
        response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication failed!");
        log.error("Authentication failed (status:{})",
                HttpServletResponse.SC_UNAUTHORIZED, failed);
        this.closeOpenContext(request);
    }

    /**
     * Manually closes the open {@link Context} if one exists. We need to do this manually because
     * {@link #continueChainBeforeSuccessfulAuthentication} is {@code false} by default, which prevents the
     * {@link org.dspace.app.rest.filter.DSpaceRequestContextFilter} from being called. Without this call, the request
     * would leave an open database connection.
     *
     * @param request The current request.
     */
    protected void closeOpenContext(HttpServletRequest request) {
        if (ContextUtil.isContextAvailable(request)) {
            try (Context context = ContextUtil.obtainContext(request)) {
                if (context != null && context.isValid()) {
                    context.complete();
                }
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
    }

}
@@ -11,6 +11,8 @@ import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
@@ -27,6 +29,8 @@ import org.dspace.core.Utils;
public abstract class MetadataValueRemovePatchOperation<DSO extends DSpaceObject>
    extends RemovePatchOperation<MetadataValueRest> {

    private static final Logger log = LogManager.getLogger();

    @Override
    protected Class<MetadataValueRest[]> getArrayClassForEvaluation() {
        return MetadataValueRest[].class;
@@ -42,7 +46,12 @@ public abstract class MetadataValueRemovePatchOperation<DSO extends DSpaceObject
        List<MetadataValue> mm = getDSpaceObjectService().getMetadata(source, metadata[0], metadata[1], metadata[2],
                Item.ANY);
        if (index != -1) {
            if (index < mm.size()) {
                getDSpaceObjectService().removeMetadataValues(context, source, Arrays.asList(mm.get(index)));
            } else {
                log.warn("value of index ({}) is out of range of the metadata value list of size {} (target: {})",
                        index, mm.size(), target);
            }
        } else {
            getDSpaceObjectService().clearMetadata(context, source, metadata[0], metadata[1], metadata[2], Item.ANY);
        }
@@ -9,9 +9,11 @@ package org.dspace.app.rest.utils;

 import static java.util.Objects.isNull;
 import static java.util.Objects.nonNull;
-import static javax.mail.internet.MimeUtility.encodeText;

 import java.io.IOException;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
+import java.text.Normalizer;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Objects;
@@ -171,9 +173,16 @@ public class HttpHeadersInitializer {

         // disposition may be null here if contentType is null
         if (!isNullOrEmpty(disposition)) {
-            httpHeaders.put(CONTENT_DISPOSITION, Collections.singletonList(String.format(CONTENT_DISPOSITION_FORMAT,
-                disposition,
-                encodeText(fileName))));
+            String fallbackAsciiName = createFallbackAsciiName(this.fileName);
+            String encodedUtf8Name = createEncodedUtf8Name(this.fileName);
+
+            String headerValue = String.format(
+                "%s; filename=\"%s\"; filename*=UTF-8''%s",
+                disposition,
+                fallbackAsciiName,
+                encodedUtf8Name
+            );
+            httpHeaders.put(CONTENT_DISPOSITION, Collections.singletonList(headerValue));
         }
         log.debug("Content-Disposition : {}", disposition);

@@ -261,4 +270,41 @@ public class HttpHeadersInitializer {
         return Arrays.binarySearch(matchValues, toMatch) > -1 || Arrays.binarySearch(matchValues, "*") > -1;
     }
+
+    /**
+     * Creates a safe ASCII-only fallback filename by removing diacritics (accents)
+     * and replacing any remaining non-ASCII characters.
+     * E.g., "ä-ö-é.pdf" becomes "a-o-e.pdf".
+     * @param originalFilename The original filename.
+     * @return A string containing only ASCII characters.
+     */
+    private String createFallbackAsciiName(String originalFilename) {
+        if (originalFilename == null) {
+            return "";
+        }
+        String normalized = Normalizer.normalize(originalFilename, Normalizer.Form.NFD);
+        String withoutAccents = normalized.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
+        return withoutAccents.replaceAll("[^\\x00-\\x7F]", "");
+    }
+
+    /**
+     * Creates a percent-encoded UTF-8 filename according to RFC 5987.
+     * This is for the `filename*` parameter.
+     * E.g., "ä ö é.pdf" becomes "%C3%A4%20%C3%B6%20%C3%A9.pdf".
+     * @param originalFilename The original filename.
+     * @return A percent-encoded string.
+     */
+    private String createEncodedUtf8Name(String originalFilename) {
+        if (originalFilename == null) {
+            return "";
+        }
+        try {
+            String encoded = URLEncoder.encode(originalFilename, StandardCharsets.UTF_8.toString());
+            return encoded.replace("+", "%20");
+        } catch (java.io.UnsupportedEncodingException e) {
+            // Fallback to a simple ASCII name if encoding fails.
+            log.error("UTF-8 encoding not supported, which should not happen.", e);
+            return createFallbackAsciiName(originalFilename);
+        }
+    }
 }
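
A self-contained sketch of what the two helpers above produce for a non-ASCII filename; nothing here is DSpace-specific beyond mirroring their logic (java.text.Normalizer plus java.net.URLEncoder), and the sample filename is illustrative only.

    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import java.text.Normalizer;

    public class DispositionDemo {
        public static void main(String[] args) {
            String fileName = "ä-ö-é.pdf";
            // ASCII fallback: decompose accents (NFD), drop combining marks, drop any other non-ASCII.
            String ascii = Normalizer.normalize(fileName, Normalizer.Form.NFD)
                    .replaceAll("\\p{InCombiningDiacriticalMarks}+", "")
                    .replaceAll("[^\\x00-\\x7F]", "");
            // RFC 5987 value: percent-encoded UTF-8, with "+" mapped back to "%20".
            String utf8 = URLEncoder.encode(fileName, StandardCharsets.UTF_8).replace("+", "%20");
            // Prints: attachment; filename="a-o-e.pdf"; filename*=UTF-8''%C3%A4-%C3%B6-%C3%A9.pdf
            System.out.printf("attachment; filename=\"%s\"; filename*=UTF-8''%s%n", ascii, utf8);
        }
    }

Clients that understand RFC 6266 pick the UTF-8 filename* parameter; older clients fall back to the plain ASCII filename.
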
@@ -27,6 +27,7 @@ import org.dspace.content.Site;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.handle.service.HandleService;
+import org.dspace.services.ConfigurationService;
 import org.dspace.statistics.Dataset;
 import org.dspace.statistics.content.DatasetDSpaceObjectGenerator;
 import org.dspace.statistics.content.DatasetTimeGenerator;
@@ -46,6 +47,9 @@ import org.springframework.stereotype.Component;
 @Component
 public class UsageReportUtils {

+    @Autowired
+    private ConfigurationService configurationService;
+
     @Autowired
     private HandleService handleService;

@@ -135,13 +139,14 @@ public class UsageReportUtils {
      */
     private UsageReportRest resolveGlobalUsageReport(Context context)
         throws SQLException, IOException, ParseException, SolrServerException {
+        int topItemsLimit = configurationService.getIntProperty("usage-statistics.topItemsLimit", 10);
+
         StatisticsListing statListing = new StatisticsListing(
             new StatisticsDataVisits());

-        // Adding a new generator for our top 10 items without a name length delimiter
+        // Adding a new generator for our top n items without a name length delimiter
         DatasetDSpaceObjectGenerator dsoAxis = new DatasetDSpaceObjectGenerator();
-        // TODO make max nr of top items (views wise)? Must be set
-        dsoAxis.addDsoChild(Constants.ITEM, 10, false, -1);
+        dsoAxis.addDsoChild(Constants.ITEM, topItemsLimit, false, -1);
         statListing.addDatasetGenerator(dsoAxis);

         Dataset dataset = statListing.getDataset(context, 1);
@@ -182,7 +187,7 @@ public class UsageReportUtils {
         UsageReportPointDsoTotalVisitsRest totalVisitPoint = new UsageReportPointDsoTotalVisitsRest();
         totalVisitPoint.setType(StringUtils.substringAfterLast(dso.getClass().getName().toLowerCase(), "."));
         totalVisitPoint.setId(dso.getID().toString());
-        if (dataset.getColLabels().size() > 0) {
+        if (!dataset.getColLabels().isEmpty()) {
             totalVisitPoint.setLabel(dso.getName());
             totalVisitPoint.addValue("views", Integer.valueOf(dataset.getMatrix()[0][0]));
         } else {
@@ -205,10 +210,14 @@ public class UsageReportUtils {
      */
     private UsageReportRest resolveTotalVisitsPerMonth(Context context, DSpaceObject dso)
         throws SQLException, IOException, ParseException, SolrServerException {
+        String startDateInterval =
+            configurationService.getProperty("usage-statistics.startDateInterval", "-6");
+        String endDateInterval =
+            configurationService.getProperty("usage-statistics.endDateInterval", "+1");
+
         StatisticsTable statisticsTable = new StatisticsTable(new StatisticsDataVisits(dso));
         DatasetTimeGenerator timeAxis = new DatasetTimeGenerator();
-        // TODO month start and end as request para?
-        timeAxis.setDateInterval("month", "-6", "+1");
+        timeAxis.setDateInterval("month", startDateInterval, endDateInterval);
         statisticsTable.addDatasetGenerator(timeAxis);
         DatasetDSpaceObjectGenerator dsoAxis = new DatasetDSpaceObjectGenerator();
         dsoAxis.addDsoChild(dso.getType(), 10, false, -1);
@@ -275,7 +284,10 @@ public class UsageReportUtils {
      */
     private UsageReportRest resolveTopCountries(Context context, DSpaceObject dso)
         throws SQLException, IOException, ParseException, SolrServerException {
-        Dataset dataset = this.getTypeStatsDataset(context, dso, "countryCode", 1);
+        int topCountriesLimit =
+            configurationService.getIntProperty("usage-statistics.topCountriesLimit", 100);
+
+        Dataset dataset = this.getTypeStatsDataset(context, dso, "countryCode", topCountriesLimit, 1);

         UsageReportRest usageReportRest = new UsageReportRest();
         for (int i = 0; i < dataset.getColLabels().size(); i++) {
@@ -299,7 +311,10 @@ public class UsageReportUtils {
      */
     private UsageReportRest resolveTopCities(Context context, DSpaceObject dso)
         throws SQLException, IOException, ParseException, SolrServerException {
-        Dataset dataset = this.getTypeStatsDataset(context, dso, "city", 1);
+        int topCitiesLimit =
+            configurationService.getIntProperty("usage-statistics.topCitiesLimit", 100);
+
+        Dataset dataset = this.getTypeStatsDataset(context, dso, "city", topCitiesLimit, 1);

         UsageReportRest usageReportRest = new UsageReportRest();
         for (int i = 0; i < dataset.getColLabels().size(); i++) {
@@ -339,16 +354,17 @@ public class UsageReportUtils {
      * @param dso            DSO we want the stats dataset of
      * @param typeAxisString String of the type we want on the axis of the dataset (corresponds to solr field),
      *                       examples: countryCode, city
+     * @param typeAxisMax    Maximum amount of results to return in the dataset
     * @param facetMinCount  Minimum amount of results on a facet data point for it to be added to dataset
     * @return Stats dataset with the given type on the axis, of the given DSO and with given facetMinCount
     */
-    private Dataset getTypeStatsDataset(Context context, DSpaceObject dso, String typeAxisString, int facetMinCount)
+    private Dataset getTypeStatsDataset(Context context, DSpaceObject dso, String typeAxisString, int typeAxisMax,
+                                        int facetMinCount)
        throws SQLException, IOException, ParseException, SolrServerException {
        StatisticsListing statListing = new StatisticsListing(new StatisticsDataVisits(dso));
        DatasetTypeGenerator typeAxis = new DatasetTypeGenerator();
        typeAxis.setType(typeAxisString);
-        // TODO make max nr of top countries/cities a request para? Must be set
-        typeAxis.setMax(100);
+        typeAxis.setMax(typeAxisMax);
        statListing.addDatasetGenerator(typeAxis);
        return statListing.getDataset(context, facetMinCount);
    }
@@ -16,12 +16,12 @@ import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.List;

+import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.http.ProtocolVersion;
 import org.apache.http.StatusLine;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.entity.BasicHttpEntity;
-import org.apache.tools.ant.filters.StringInputStream;
 import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
 import org.dspace.importer.external.datamodel.ImportRecord;
 import org.dspace.importer.external.metadatamapping.MetadatumDTO;
@@ -43,7 +43,8 @@ public class AbstractLiveImportIntegrationTest extends AbstractControllerIntegra
     private void checkMetadataValue(List<MetadatumDTO> list, List<MetadatumDTO> list2) {
         assertEquals(list.size(), list2.size());
         for (int i = 0; i < list.size(); i++) {
-            assertTrue(sameMetadatum(list.get(i), list2.get(i)));
+            assertTrue("'" + list.get(i).toString() + "' should be equal to '" + list2.get(i).toString() + "'",
+                sameMetadatum(list.get(i), list2.get(i)));
         }
     }

@@ -70,7 +71,7 @@ public class AbstractLiveImportIntegrationTest extends AbstractControllerIntegra
         throws UnsupportedEncodingException {
         BasicHttpEntity basicHttpEntity = new BasicHttpEntity();
         basicHttpEntity.setChunked(true);
-        basicHttpEntity.setContent(new StringInputStream(xmlExample));
+        basicHttpEntity.setContent(IOUtils.toInputStream(xmlExample));

         CloseableHttpResponse response = mock(CloseableHttpResponse.class);
         when(response.getStatusLine()).thenReturn(statusLine(statusCode, reason));
@@ -8,7 +8,6 @@
 package org.dspace.app.rest;

 import static java.util.UUID.randomUUID;
-import static javax.mail.internet.MimeUtility.encodeText;
 import static org.apache.commons.codec.CharEncoding.UTF_8;
 import static org.apache.commons.collections.CollectionUtils.isEmpty;
 import static org.apache.commons.io.IOUtils.toInputStream;
@@ -347,7 +346,11 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest
         //2. A public item with a bitstream

         String bitstreamContent = "0123456789";
-        String bitstreamName = "ภาษาไทย";
+        String bitstreamName = "ภาษาไทย-com-acentuação.pdf";
+        String expectedAscii = "-com-acentuacao.pdf";
+        String expectedUtf8Encoded =
+            "%E0%B8%A0%E0%B8%B2%E0%B8%A9%E0%B8%B2%E0%B9%84%E0%B8%97%E0%B8%A2-"
+                + "com-acentua%C3%A7%C3%A3o.pdf";

         try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {

@@ -371,7 +374,9 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest
             //We expect the content disposition to have the encoded bitstream name
             .andExpect(header().string(
                 "Content-Disposition",
-                "attachment;filename=\"" + encodeText(bitstreamName) + "\""
+                String.format("attachment; filename=\"%s\"; filename*=UTF-8''%s",
+                    expectedAscii,
+                    expectedUtf8Encoded)
             ));
     }
@@ -143,6 +143,7 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra

         user = EPersonBuilder.createEPerson(context)
             .withEmail("user@example.com")
+            .withNameInMetadata("Example", "User")
             .withPassword(password)
             .build();

@@ -322,7 +323,7 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
     public void testCreateAndReturn() throws Exception {

         String id = user.getID().toString();
-        String name = user.getName();
+        String name = user.getFullName();

         String authToken = getAuthToken(user.getEmail(), password);

@@ -341,6 +342,8 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
             .andExpect(status().isOk())
             .andExpect(jsonPath("$.type", is("item")))
             .andExpect(jsonPath("$.metadata", matchMetadata("dspace.object.owner", name, id, 0)))
+            .andExpect(jsonPath("$.metadata", matchMetadata("person.givenName", user.getFirstName(), 0)))
+            .andExpect(jsonPath("$.metadata", matchMetadata("person.familyName", user.getLastName(), 0)))
             .andExpect(jsonPath("$.metadata", matchMetadata("dspace.entity.type", "Person", 0)));

         getClient(authToken).perform(get("/api/eperson/profiles/{id}/eperson", id))
@@ -390,7 +393,7 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
     public void testCreateAndReturnWithAdmin() throws Exception {

         String id = user.getID().toString();
-        String name = user.getName();
+        String name = user.getFullName();

         configurationService.setProperty("researcher-profile.collection.uuid", null);

@@ -411,6 +414,8 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
         getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id))
             .andExpect(status().isOk())
             .andExpect(jsonPath("$.type", is("item")))
+            .andExpect(jsonPath("$.metadata", matchMetadata("person.givenName", user.getFirstName(), 0)))
+            .andExpect(jsonPath("$.metadata", matchMetadata("person.familyName", user.getLastName(), 0)))
             .andExpect(jsonPath("$.metadata", matchMetadata("dspace.object.owner", name, id, 0)))
             .andExpect(jsonPath("$.metadata", matchMetadata("dspace.entity.type", "Person", 0)));
@@ -0,0 +1,101 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.rest.test;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.Property;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+import org.junit.After;
+import org.junit.Test;
+
+/**
+ * Test basic log4j logging functionality, extending AbstractControllerIntegrationTest
+ * purely to make sure we are testing the *web application* and not just the kernel
+ * as that is where logging has broken in the past.
+ *
+ * @author Kim Shepherd
+ */
+public class WebappLoggingIT extends AbstractControllerIntegrationTest {
+
+    private static final Logger logger = LogManager.getLogger(WebappLoggingIT.class);
+    private static final String APPENDER_NAME = "DSpaceTestAppender";
+
+    static class InMemoryAppender extends AbstractAppender {
+        private final List<String> messages = new ArrayList<>();
+
+        protected InMemoryAppender(String name) {
+            super(
+                name,
+                null,
+                PatternLayout.newBuilder().withPattern("%m").build(),
+                false,
+                Property.EMPTY_ARRAY
+            );
+            start();
+        }
+
+        @Override
+        public void append(LogEvent event) {
+            messages.add(event.getMessage().getFormattedMessage());
+        }
+
+        public List<String> getMessages() {
+            return messages;
+        }
+    }
+
+    @Test
+    public void testLogging() throws Exception {
+        LoggerContext context = (LoggerContext) LogManager.getContext(false);
+        Configuration config = context.getConfiguration();
+
+        InMemoryAppender appender = new InMemoryAppender(APPENDER_NAME);
+        config.addAppender(appender);
+
+        LoggerConfig testLoggerConfig = new LoggerConfig(logger.getName(), Level.INFO, false);
+        testLoggerConfig.addAppender(appender, null, null);
+        config.addLogger(logger.getName(), testLoggerConfig);
+        context.updateLoggers();
+
+        logger.info("DSPACE TEST LOG ENTRY");
+
+        List<String> messages = appender.getMessages();
+        assertTrue(messages.stream().anyMatch(msg -> msg.contains("DSPACE TEST LOG ENTRY")));
+    }
+
+    @After
+    public void cleanupAppender() {
+        LoggerContext context = (LoggerContext) LogManager.getContext(false);
+        Configuration config = context.getConfiguration();
+
+        config.removeLogger(logger.getName());
+
+        Appender appender = config.getAppender(APPENDER_NAME);
+        if (appender != null) {
+            appender.stop();
+            config.getAppenders().remove(APPENDER_NAME);
+        }
+
+        context.updateLoggers();
+    }
+
+}
@@ -10,16 +10,30 @@ package org.dspace.app.sword;

+import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;

+import java.nio.file.Path;
+import java.util.List;
+
 import org.dspace.app.rest.test.AbstractWebClientIntegrationTest;
+import org.dspace.builder.CollectionBuilder;
+import org.dspace.builder.CommunityBuilder;
+import org.dspace.content.Collection;
 import org.dspace.services.ConfigurationService;
+import org.hamcrest.MatcherAssert;
 import org.junit.Assume;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.FileSystemResource;
+import org.springframework.http.ContentDisposition;
+import org.springframework.http.HttpEntity;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.RequestEntity;
 import org.springframework.http.ResponseEntity;
 import org.springframework.test.context.TestPropertySource;

@@ -45,6 +59,9 @@ public class Swordv1IT extends AbstractWebClientIntegrationTest {
     private final String DEPOSIT_PATH = "/sword/deposit";
     private final String MEDIA_LINK_PATH = "/sword/media-link";

+    // ATOM Content type returned by SWORDv1
+    private final String ATOM_CONTENT_TYPE = "application/atom+xml;charset=UTF-8";
+
     @Before
     public void onlyRunIfConfigExists() {
         // These integration tests REQUIRE that SWORDWebConfig is found/available (as this class deploys SWORD)
@@ -93,10 +110,76 @@ public class Swordv1IT extends AbstractWebClientIntegrationTest {
     }

     @Test
-    @Ignore
     public void depositTest() throws Exception {
-        // TODO: Actually test a full deposit via SWORD.
-        // Currently, we are just ensuring the /deposit endpoint exists (see above) and isn't throwing a 404
+        context.turnOffAuthorisationSystem();
+        // Create a top level community and one Collection
+        parentCommunity = CommunityBuilder.createCommunity(context)
+                                          .withName("Parent Community")
+                                          .build();
+        // Make sure our Collection allows the "eperson" user to submit into it
+        Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
+                                                 .withName("Test SWORDv1 Collection")
+                                                 .withSubmitterGroup(eperson)
+                                                 .build();
+        // Above changes MUST be committed to the database for SWORDv2 to see them.
+        context.commit();
+        context.restoreAuthSystemState();
+
+        // Specify zip file
+        // NOTE: We are using the same "example.zip" as SWORDv2IT because that same ZIP is valid for both v1 and v2
+        FileSystemResource zipFile = new FileSystemResource(Path.of("src", "test", "resources", "org",
+                                                                    "dspace", "app", "sword2", "example.zip"));
+
+        // Add required headers
+        HttpHeaders headers = new HttpHeaders();
+        headers.setContentType(MediaType.valueOf("application/zip"));
+        headers.setContentDisposition(ContentDisposition.attachment().filename("example.zip").build());
+        headers.set("X-Packaging", "http://purl.org/net/sword-types/METSDSpaceSIP");
+        headers.setAccept(List.of(MediaType.APPLICATION_ATOM_XML));
+
+        //----
+        // STEP 1: Verify upload/submit via SWORDv1 works
+        //----
+        // Send POST to upload Zip file via SWORD
+        ResponseEntity<String> response = postResponseAsString(DEPOSIT_PATH + "/" + collection.getHandle(),
+                                                               eperson.getEmail(), password,
+                                                               new HttpEntity<>(zipFile.getInputStream().readAllBytes(),
+                                                                                headers));
+
+        // Expect a 201 CREATED response with ATOM content returned
+        assertEquals(HttpStatus.CREATED, response.getStatusCode());
+        assertEquals(ATOM_CONTENT_TYPE, response.getHeaders().getContentType().toString());
+
+        // MUST return a "Location" header which is the "/sword/media-link/*" URI of the zip file bitstream within
+        // the created item (e.g. /sword/media-link/[handle-prefix]/[handle-suffix]/bitstream/[uuid])
+        assertNotNull(response.getHeaders().getLocation());
+        String mediaLink = response.getHeaders().getLocation().toString();
+
+        // Body should include the SWORD version in generator tag
+        MatcherAssert.assertThat(response.getBody(),
+                                 containsString("<atom:generator uri=\"http://www.dspace.org/ns/sword/1.3.1\"" +
+                                                    " version=\"1.3\"/>"));
+        // Verify Item title also is returned in the body
+        MatcherAssert.assertThat(response.getBody(), containsString("Attempts to detect retrotransposition"));
+
+        //----
+        // STEP 2: Verify /media-link access works
+        //----
+        // Media-Link URI should work when requested by the EPerson who did the deposit
+        HttpHeaders authHeaders = new HttpHeaders();
+        authHeaders.setBasicAuth(eperson.getEmail(), password);
+        RequestEntity request = RequestEntity.get(mediaLink)
+                                             .accept(MediaType.valueOf("application/atom+xml"))
+                                             .headers(authHeaders)
+                                             .build();
+        response = responseAsString(request);
+
+        // Expect a 200 response with ATOM feed content returned
+        assertEquals(HttpStatus.OK, response.getStatusCode());
+        assertEquals(ATOM_CONTENT_TYPE, response.getHeaders().getContentType().toString());
+        // Body should include a link to the zip bitstream in the newly created Item
+        // This just verifies "example.zip" exists in the body.
+        MatcherAssert.assertThat(response.getBody(), containsString("example.zip"));
     }

     @Test
@@ -105,13 +188,8 @@ public class Swordv1IT extends AbstractWebClientIntegrationTest {
         ResponseEntity<String> response = getResponseAsString(MEDIA_LINK_PATH);
         // Expect a 401 response code
         assertThat(response.getStatusCode(), equalTo(HttpStatus.UNAUTHORIZED));
+        //NOTE: An authorized /media-link test is performed in depositTest() above.
     }
-
-    @Test
-    @Ignore
-    public void mediaLinkTest() throws Exception {
-        // TODO: Actually test a /media-link request.
-        // Currently, we are just ensuring the /media-link endpoint exists (see above) and isn't throwing a 404
-    }
 }
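
Roughly the wire-level exchange the new depositTest() drives; the handle and bitstream UUID below are placeholders, not values from this changeset.

    POST /sword/deposit/123456789/2 HTTP/1.1
    Authorization: Basic <eperson credentials>
    Content-Type: application/zip
    Content-Disposition: attachment; filename=example.zip
    X-Packaging: http://purl.org/net/sword-types/METSDSpaceSIP
    Accept: application/atom+xml

    HTTP/1.1 201 Created
    Content-Type: application/atom+xml;charset=UTF-8
    Location: /sword/media-link/123456789/2/bitstream/<bitstream-uuid>
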
@@ -9,7 +9,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </parent>

     <properties>

@@ -15,7 +15,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>

@@ -13,7 +13,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>
@@ -4,20 +4,21 @@ Article = journal-article
 Book = book
 Book\ chapter = book-chapter
 Dataset = data-set
-Learning\ Object = other
-Image = other
-Image,\ 3-D = other
-Map = other
-Musical\ Score = other
+Learning\ Object = learning-object
+Image = image
+Image,\ 3-D = image
+Journal = journal-issue
+Map = cartographic-material
+Musical\ Score = musical-composition
 Plan\ or\ blueprint = other
 Preprint = preprint
 Presentation = other
-Recording,\ acoustical = other
-Recording,\ musical = other
-Recording,\ oral = other
+Recording,\ acoustical = sound
+Recording,\ musical = sound
+Recording,\ oral = sound
 Software = software
-Technical\ Report = other
-Thesis = other
-Video = other
+Technical\ Report = report
+Thesis = dissertation-thesis
+Video = moving-image
 Working\ Paper = working-paper
 Other = other
@@ -18,6 +18,9 @@ assetstore.dir = ${dspace.dir}/assetstore
 # `bitstore.xml` configuration.
 assetstore.index.primary = 0

+#if the assetstore path is symbolic link, use this configuration to allow that path.
+#assetstore.allowed.roots = /data/assetstore
+
 #---------------------------------------------------------------#
 #-------------- Amazon S3 Specific Configurations --------------#
 #---------------------------------------------------------------#
@@ -45,6 +45,9 @@ epo.searchUrl = https://ops.epo.org/rest-services/published-data/search
 #################################################################
 #---------------------- PubMed -----------------------------#
 #---------------------------------------------------------------#
+# If apiKey is set then it's used, if not set or blank then it's not
+# Max amount of requests per ip per second with apiKey is 10; without 3
+pubmed.apiKey =
 pubmed.url.search = https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi
 pubmed.url.fetch = https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi
 #################################################################
@@ -1,4 +1,3 @@
-
 #------------------------------------------------------------------#
 #--------------------ORCID GENERIC CONFIGURATIONS------------------#
 #------------------------------------------------------------------#
@@ -58,12 +57,18 @@ orcid.mapping.work.contributors = dc.contributor.editor::editor

 ##orcid.mapping.work.external-ids syntax is <metadatafield>::<type> or $simple-handle::<type>
 ##The full list of available external identifiers is available here https://pub.orcid.org/v3.0/identifiers
+# The identifiers need to have a relationship of SELF, PART_OF, VERSION_OF or FUNDED_BY.
+# The default for most identifiers is SELF. The default for identifiers more commonly
+# associated with 'parent' publications (ISSN, ISBN) is PART_OF.
+# See the map in `orcid-services.xml`
+# VERSION_OF and FUNDED_BY are not currently implemented.
 orcid.mapping.work.external-ids = dc.identifier.doi::doi
 orcid.mapping.work.external-ids = dc.identifier.scopus::eid
 orcid.mapping.work.external-ids = dc.identifier.pmid::pmid
 orcid.mapping.work.external-ids = $simple-handle::handle
 orcid.mapping.work.external-ids = dc.identifier.isi::wosuid
+orcid.mapping.work.external-ids = dc.identifier.issn::issn
+orcid.mapping.work.external-ids = dc.identifier.isbn::isbn

 ### Funding mapping ###
 orcid.mapping.funding.title = dc.title
@@ -141,6 +146,9 @@ orcid.bulk-synchronization.max-attempts = 5
 #--------------------ORCID EXTERNAL DATA MAPPING-------------------#
 #------------------------------------------------------------------#

+# Note - the below mapping is for ORCID->DSpace imports, not for
+# DSpace->ORCID exports (see orcid.mapping.work.*)
+
 ### Work (Publication) external-data.mapping ###
 orcid.external-data.mapping.publication.title = dc.title
@@ -61,3 +61,20 @@ usage-statistics.shardedByYear = false

 # Only anonymize statistics records older than this threshold (expressed in days)
 #anonymize_statistics.time_threshold = 90
+
+# Maximum number of items to display in the usage statistics report for an entire repository
+usage-statistics.topItemsLimit = 10
+
+# Number of months to begin retrieving usage statistics for total visits per month of a DSpace object
+# For example, -6 means include the previous six months
+usage-statistics.startDateInterval = -6
+
+# Number of months to end retrieving usage statistics for total visits per month of a DSpace object
+# For example, +1 means include the current month
+usage-statistics.endDateInterval = +1
+
+# Maximum number of countries to display in the usage statistics reports
+usage-statistics.topCountriesLimit = 100
+
+# Maximum number of cities to display in the usage statistics reports
+usage-statistics.topCitiesLimit = 100
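
Read together with the UsageReportUtils changes earlier in this compare, a sketch of how these keys are resolved at runtime; the defaults deliberately match the previously hard-coded values, so leaving a key unset changes nothing.

    // Defaults mirror the old hard-coded behaviour when a key is unset or commented out.
    int topItems     = configurationService.getIntProperty("usage-statistics.topItemsLimit", 10);
    int topCountries = configurationService.getIntProperty("usage-statistics.topCountriesLimit", 100);
    int topCities    = configurationService.getIntProperty("usage-statistics.topCitiesLimit", 100);
    String start     = configurationService.getProperty("usage-statistics.startDateInterval", "-6");
    String end       = configurationService.getProperty("usage-statistics.endDateInterval", "+1");
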
@@ -55,6 +55,27 @@
 <bean id="orcidWorkFactoryFieldMapping" class="org.dspace.orcid.model.OrcidWorkFieldMapping" >
     <property name="contributorFields" value="${orcid.mapping.work.contributors}" />
     <property name="externalIdentifierFields" value="${orcid.mapping.work.external-ids}" />
+    <property name="externalIdentifierPartOfMap">
+        <map>
+            <entry key="issn">
+                <list>
+                    <value>journal-article</value>
+                    <value>magazine-article</value>
+                    <value>newspaper-article</value>
+                    <value>data-set</value>
+                    <value>learning-object</value>
+                    <value>other</value>
+                </list>
+            </entry>
+            <entry key="isbn">
+                <list>
+                    <value>book-chapter</value>
+                    <value>book-review</value>
+                    <value>other</value>
+                </list>
+            </entry>
+        </map>
+    </property>
     <property name="publicationDateField" value="${orcid.mapping.work.publication-date}" />
     <property name="titleField" value="${orcid.mapping.work.title}" />
     <property name="journalTitleField" value="${orcid.mapping.work.journal-title}" />
@@ -21,7 +21,6 @@
     <property name="minimumAcceptanceScore" value="3" />
 </bean>

-
 <bean id="autoassignactionAPI" class="org.dspace.xmlworkflow.state.actions.userassignment.AutoAssignAction" scope="prototype"/>
 <bean id="noUserSelectionActionAPI" class="org.dspace.xmlworkflow.state.actions.userassignment.NoUserSelectionAction" scope="prototype"/>
 <bean id="assignoriginalsubmitteractionAPI" class="org.dspace.xmlworkflow.state.actions.userassignment.AssignOriginalSubmitterAction" scope="prototype"/>
@@ -44,7 +43,6 @@
     <property name="requiresUI" value="true"/>
 </bean>

-
 <!--Action for the select single reviewer workflow -->
 <bean id="selectrevieweraction" class="org.dspace.xmlworkflow.state.actions.WorkflowActionConfig" scope="prototype">
     <constructor-arg type="java.lang.String" value="selectrevieweraction"/>
@@ -64,21 +62,14 @@
     <property name="requiresUI" value="true"/>
 </bean>

-<bean id="ratingreviewaction" class="org.dspace.xmlworkflow.state.actions.WorkflowActionConfig" scope="prototype">
-    <constructor-arg type="java.lang.String" value="ratingreviewaction"/>
-    <property name="processingAction" ref="ratingreviewactionAPI" />
-    <property name="requiresUI" value="true"/>
-</bean>
-
-<!--Autmatic step that evaluates scores (workflow.score) and checks if they match the configured minimum for archiving -->
+<!--Automatic step that evaluates scores (workflow.score) and checks if they match the configured minimum for archiving -->
 <bean id="evaluationaction" class="org.dspace.xmlworkflow.state.actions.WorkflowActionConfig" scope="prototype">
     <constructor-arg type="java.lang.String" value="evaluationaction"/>
     <property name="processingAction" ref="evaluationactionAPI" />
     <property name="requiresUI" value="false"/>
 </bean>

-
-<!--User selection actions-->
+<!--User selection actions-->
 <bean id="claimaction" class="org.dspace.xmlworkflow.state.actions.UserSelectionActionConfig" scope="prototype">
     <constructor-arg type="java.lang.String" value="claimaction"/>
     <property name="processingAction" ref="claimactionAPI"/>
@@ -17,7 +17,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>modules</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>

@@ -11,7 +11,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>

@@ -13,7 +13,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>modules</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>

@@ -13,7 +13,7 @@ just adding new jar in the classloader</description>
     <parent>
         <artifactId>modules</artifactId>
         <groupId>org.dspace</groupId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>

@@ -16,7 +16,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
@@ -324,7 +324,7 @@
     <dynamicField name="*_mlt" type="text" indexed="true" stored="true" multiValued="true" omitNorms="true" termVectors="true" termPositions="true" termOffsets="true"/>

     <!--Date matching-->
-    <dynamicField name="*.year" type="sint" indexed="true" stored="true" multiValued="true" omitNorms="true" />
+    <dynamicField name="*_year" type="sint" indexed="true" stored="true" multiValued="true" omitNorms="true" />
     <dynamicField name="*_dt" type="date" indexed="true" stored="true" multiValued="false" omitNorms="true" docValues="true"/>

     <!--Used for matching on all other fields -->
pom.xml
@@ -4,7 +4,7 @@
 <groupId>org.dspace</groupId>
 <artifactId>dspace-parent</artifactId>
 <packaging>pom</packaging>
-<version>7.6.4</version>
+<version>7.6.6-SNAPSHOT</version>
 <name>DSpace Parent Project</name>
 <description>
     DSpace open source software is a turnkey institutional repository application.
@@ -27,18 +27,18 @@
     <postgresql.driver.version>42.7.7</postgresql.driver.version>
     <solr.client.version>8.11.4</solr.client.version>

-    <ehcache.version>3.10.8</ehcache.version>
+    <ehcache.version>3.11.1</ehcache.version>
     <errorprone.version>2.31.0</errorprone.version>
     <!-- NOTE: when updating jackson.version, also sync jackson-databind.version below -->
-    <jackson.version>2.19.1</jackson.version>
-    <jackson-databind.version>2.19.1</jackson-databind.version>
+    <jackson.version>2.19.2</jackson.version>
+    <jackson-databind.version>2.19.2</jackson-databind.version>
     <javax-annotation.version>1.3.2</javax-annotation.version>
     <jaxb-api.version>2.3.1</jaxb-api.version>
     <jaxb-runtime.version>2.3.9</jaxb-runtime.version>
     <jcache-version>1.1.1</jcache-version>
     <!-- NOTE: Jetty needed for Solr, Handle Server & tests -->
-    <jetty.version>9.4.57.v20241219</jetty.version>
-    <log4j.version>2.25.0</log4j.version>
+    <jetty.version>9.4.58.v20250814</jetty.version>
+    <log4j.version>2.25.1</log4j.version>
     <pdfbox-version>2.0.34</pdfbox-version>
     <rome.version>1.19.0</rome.version>
     <slf4j.version>1.7.36</slf4j.version>
@@ -89,7 +89,7 @@
     <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-enforcer-plugin</artifactId>
-        <version>3.5.0</version>
+        <version>3.6.1</version>
         <executions>
             <execution>
                 <id>enforce-java</id>
@@ -295,7 +295,7 @@
     <plugin>
         <groupId>com.github.spotbugs</groupId>
         <artifactId>spotbugs-maven-plugin</artifactId>
-        <version>4.9.3.0</version>
+        <version>4.9.4.2</version>
         <configuration>
             <effort>Max</effort>
             <threshold>Low</threshold>
@@ -305,7 +305,7 @@
         <dependency>
             <groupId>com.github.spotbugs</groupId>
             <artifactId>spotbugs</artifactId>
-            <version>4.9.3</version>
+            <version>4.9.4</version>
         </dependency>
     </dependencies>
     <executions>
@@ -363,7 +363,7 @@
     <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-javadoc-plugin</artifactId>
-        <version>3.11.2</version>
+        <version>3.11.3</version>
         <configuration>
             <!-- Never fail a build based on Javadoc errors -->
             <failOnError>false</failOnError>
@@ -515,10 +515,10 @@
 </build>

 <profiles>
-    <!-- Allow for passing extra memory to Unit/Integration tests.
-         By default this gives unit tests 1GB of memory max (when tests are enabled),
-         unless tweaked on commandline (e.g. "-Dtest.argLine=-Xmx512m"). Since
-         m-surefire-p and m-failsafe-p both fork a new JVM for testing, they ignores MAVEN_OPTS. -->
+    <!-- Profile which sets our default system properties for all Unit/Integration tests.
+         By default, this sets UTF-8 encoding for all tests and gives tests 1GB of memory max.
+         Both m-surefire-p and m-failsafe-p are unable to use MAVEN_OPTS because they fork a new JVM for testing.
+         These default settings may be overridden via the commandline (e.g. "-Dtest.argLine=-Xmx512m"). -->
     <profile>
         <id>test-argLine</id>
         <activation>
@@ -527,7 +527,7 @@
             </property>
         </activation>
         <properties>
-            <test.argLine>-Xmx1024m</test.argLine>
+            <test.argLine>-Xmx1024m -Dfile.encoding=UTF-8</test.argLine>
         </properties>
     </profile>

@@ -680,7 +680,7 @@
     <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>license-maven-plugin</artifactId>
-        <version>2.5.0</version>
+        <version>2.6.0</version>
         <!-- This plugin only needs to be run on the Parent POM
              as it aggregates results from all child POMs. -->
         <inherited>false</inherited>
@@ -875,14 +875,14 @@
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-rest</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <type>jar</type>
         <classifier>classes</classifier>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-rest</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <type>war</type>
     </dependency>
 </dependencies>
@@ -1031,69 +1031,69 @@
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-api</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-api</artifactId>
         <type>test-jar</type>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <scope>test</scope>
     </dependency>
     <dependency>
         <groupId>org.dspace.modules</groupId>
         <artifactId>additions</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>

     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-sword</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-swordv2</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-oai</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-services</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-server-webapp</artifactId>
         <type>test-jar</type>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <scope>test</scope>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-rdf</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-iiif</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-server-webapp</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <type>jar</type>
         <classifier>classes</classifier>
     </dependency>
     <dependency>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-server-webapp</artifactId>
-        <version>7.6.4</version>
+        <version>7.6.6-SNAPSHOT</version>
         <type>war</type>
     </dependency>
     <!-- DSpace API Localization Packages -->
@@ -1324,7 +1324,7 @@
     <dependency>
         <groupId>org.apache.james</groupId>
         <artifactId>apache-mime4j-core</artifactId>
-        <version>0.8.12</version>
+        <version>0.8.13</version>
     </dependency>
     <!-- Tika and solr-core disagree on versions of ASM -->
     <dependency>
@@ -1473,12 +1473,12 @@
     <dependency>
         <groupId>commons-cli</groupId>
         <artifactId>commons-cli</artifactId>
-        <version>1.9.0</version>
+        <version>1.10.0</version>
     </dependency>
     <dependency>
         <groupId>commons-codec</groupId>
         <artifactId>commons-codec</artifactId>
-        <version>1.18.0</version>
+        <version>1.19.0</version>
     </dependency>
     <dependency>
         <groupId>org.apache.commons</groupId>
@@ -1498,12 +1498,12 @@
     <dependency>
         <groupId>commons-io</groupId>
         <artifactId>commons-io</artifactId>
-        <version>2.19.0</version>
+        <version>2.20.0</version>
     </dependency>
     <dependency>
         <groupId>org.apache.commons</groupId>
         <artifactId>commons-lang3</artifactId>
-        <version>3.17.0</version>
+        <version>3.18.0</version>
     </dependency>
     <!-- NOTE: We don't use commons-logging or commons-compress directly,
          but many dependencies rely on them. Only here specified to avoid dependency convergence issues. -->
@@ -1515,7 +1515,7 @@
     <dependency>
         <groupId>org.apache.commons</groupId>
         <artifactId>commons-compress</artifactId>
-        <version>1.27.1</version>
+        <version>1.28.0</version>
     </dependency>
     <dependency>
         <groupId>org.apache.commons</groupId>
@@ -1525,12 +1525,12 @@
     <dependency>
         <groupId>org.apache.commons</groupId>
         <artifactId>commons-text</artifactId>
-        <version>1.13.1</version>
+        <version>1.14.0</version>
     </dependency>
     <dependency>
         <groupId>commons-validator</groupId>
         <artifactId>commons-validator</artifactId>
-        <version>1.9.0</version>
+        <version>1.10.0</version>
     </dependency>
     <dependency>
         <groupId>joda-time</groupId>
@@ -1701,7 +1701,7 @@
     <dependency>
         <groupId>com.google.http-client</groupId>
         <artifactId>google-http-client</artifactId>
-        <version>1.47.0</version>
+        <version>1.47.1</version>
         <exclusions>
             <exclusion>
                 <groupId>com.google.errorprone</groupId>
@@ -1718,12 +1718,12 @@
     <dependency>
         <groupId>io.grpc</groupId>
         <artifactId>grpc-context</artifactId>
-        <version>1.73.0</version>
+        <version>1.75.0</version>
     </dependency>
     <dependency>
         <groupId>com.google.http-client</groupId>
         <artifactId>google-http-client-jackson2</artifactId>
-        <version>1.47.0</version>
+        <version>1.47.1</version>
         <exclusions>
             <exclusion>
                 <artifactId>jackson-core</artifactId>
@@ -1745,7 +1745,7 @@
     <dependency>
         <groupId>com.google.http-client</groupId>
         <artifactId>google-http-client-gson</artifactId>
-        <version>1.47.0</version>
+        <version>1.47.1</version>
     </dependency>
     <dependency>
         <groupId>com.squareup.okhttp3</groupId>
@@ -1939,7 +1939,7 @@
     <connection>scm:git:git@github.com:DSpace/DSpace.git</connection>
     <developerConnection>scm:git:git@github.com:DSpace/DSpace.git</developerConnection>
     <url>https://github.com/DSpace/DSpace</url>
-    <tag>dspace-7.6.4</tag>
+    <tag>dspace-7_x</tag>
 </scm>

 <repositories>