Compare commits

..

3 Commits

Author SHA1 Message Date
Tim Donohue
b79880662b Merge tag 'dspace-9.1' into test_merge_minor
[maven-release-plugin] copy for tag dspace-9.1
2025-07-14 16:46:03 -05:00
Tim Donohue
edda604462 Merge tag 'dspace-8.2' into test_merge_minor
[maven-release-plugin] copy for tag dspace-8.2
2025-07-14 16:45:55 -05:00
Tim Donohue
ffcc65a07f Merge tag 'dspace-7.6.4' into test_merge_minor
[maven-release-plugin] copy for tag dspace-7.6.4
2025-07-14 16:45:46 -05:00
174 changed files with 3880 additions and 5537 deletions

View File

@@ -4,6 +4,7 @@
*/target/
dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-loadsql
dspace/src/main/docker/dspace-postgres-pgcrypto
dspace/src/main/docker/dspace-postgres-pgcrypto-curl
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/

View File

@@ -11,9 +11,8 @@ updates:
# So, only this first section can include "applies-to: security-updates"
- package-ecosystem: "maven"
directory: "/"
# Monthly dependency updates (NOTE: "schedule" doesn't apply to security updates)
schedule:
interval: "monthly"
interval: "weekly"
time: "02:00"
# Allow up to 10 open PRs for dependencies
open-pull-requests-limit: 10
@@ -134,7 +133,7 @@ updates:
directory: "/"
target-branch: dspace-9_x
schedule:
interval: "monthly"
interval: "weekly"
time: "02:00"
# Allow up to 10 open PRs for dependencies
open-pull-requests-limit: 10
@@ -255,7 +254,7 @@ updates:
directory: "/"
target-branch: dspace-8_x
schedule:
interval: "monthly"
interval: "weekly"
time: "02:00"
# Allow up to 10 open PRs for dependencies
open-pull-requests-limit: 10
@@ -376,7 +375,7 @@ updates:
directory: "/"
target-branch: dspace-7_x
schedule:
interval: "monthly"
interval: "weekly"
time: "02:00"
# Allow up to 10 open PRs for dependencies
open-pull-requests-limit: 10

View File

@@ -47,7 +47,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
# https://github.com/github/codeql-action
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v2
with:
# Codescan Javascript as well since a few JS files exist in REST API's interface
languages: java, javascript
@@ -56,8 +56,8 @@ jobs:
# NOTE: Based on testing, this autobuild process works well for DSpace. A custom
# DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
- name: Autobuild
uses: github/codeql-action/autobuild@v3
uses: github/codeql-action/autobuild@v2
# Perform GitHub Code Scanning.
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v2

View File

@@ -113,19 +113,39 @@ jobs:
REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_SOLR_URL }}
REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_SOLR_URL }}
########################################################
# Build/Push the 'dspace/dspace-postgres-loadsql' image
########################################################
dspace-postgres-loadsql:
###########################################################
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image
###########################################################
dspace-postgres-pgcrypto:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
uses: ./.github/workflows/reusable-docker-build.yml
with:
build_id: dspace-postgres-loadsql
image_name: dspace/dspace-postgres-loadsql
# Must build out of subdirectory to have access to install script.
build_id: dspace-postgres-pgcrypto-prod
image_name: dspace/dspace-postgres-pgcrypto
# Must build out of subdirectory to have access to install script for pgcrypto.
# NOTE: this context will build the image based on the Dockerfile in the specified directory
dockerfile_context: ./dspace/src/main/docker/dspace-postgres-loadsql/
dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
########################################################################
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag)
########################################################################
dspace-postgres-pgcrypto-loadsql:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
uses: ./.github/workflows/reusable-docker-build.yml
with:
build_id: dspace-postgres-pgcrypto-loadsql
image_name: dspace/dspace-postgres-pgcrypto
# Must build out of subdirectory to have access to install script for pgcrypto.
# NOTE: this context will build the image based on the Dockerfile in the specified directory
dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
# Suffix all tags with "-loadsql". Otherwise, it uses the same
# tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above.
tags_flavor: suffix=-loadsql
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
@@ -138,7 +158,7 @@ jobs:
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
# Must run after all major images are built
needs: [dspace, dspace-test, dspace-cli, dspace-solr]
needs: [dspace, dspace-test, dspace-cli, dspace-postgres-pgcrypto, dspace-solr]
env:
# Override defaults dspace.server.url because backend starts at http://127.0.0.1:8080
dspace__P__server__P__url: http://127.0.0.1:8080/server
@@ -200,19 +220,6 @@ jobs:
result=$(wget -O- -q http://127.0.0.1:8080/server/api/core/collections)
echo "$result"
echo "$result" | grep -oE "\"Dog in Yard\","
# Verify basic backend logging is working.
# 1. Access the top communities list. Verify that the "Before request" INFO statement is logged
# 2. Access an invalid endpoint (and ignore 404 response). Verify that a "status:404" WARN statement is logged
- name: Verify backend is logging properly
run: |
wget -O/dev/null -q http://127.0.0.1:8080/server/api/core/communities/search/top
logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
echo "$logs"
echo "$logs" | grep -o "Before request \[GET /server/api/core/communities/search/top\]"
wget -O/dev/null -q http://127.0.0.1:8080/server/api/does/not/exist || true
logs=$(docker compose -f docker-compose.yml logs -n 5 dspace)
echo "$logs"
echo "$logs" | grep -o "status:404 exception: The repository type does.not was not found"
# Verify Handle Server can be started and is working properly
# 1. First generate the "[dspace]/handle-server" folder with the sitebndl.zip
# 2. Start the Handle Server (and wait 20 seconds to let it start up)

View File

@@ -164,7 +164,7 @@ jobs:
# Use GitHub cache to load cached Docker images and cache the results of this build
# This decreases the number of images we need to fetch from DockerHub
cache-from: type=gha,scope=${{ inputs.build_id }}
cache-to: type=gha,scope=${{ inputs.build_id }},mode=min
cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
# Export the digest of Docker build locally
- name: Export Docker build digest
@@ -216,7 +216,7 @@ jobs:
# Use GitHub cache to load cached Docker images and cache the results of this build
# This decreases the number of images we need to fetch from DockerHub
cache-from: type=gha,scope=${{ inputs.build_id }}
cache-to: type=gha,scope=${{ inputs.build_id }},mode=min
cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
# Export image to a local TAR file
outputs: type=docker,dest=/tmp/${{ inputs.build_id }}.tar

View File

@@ -106,9 +106,6 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
<!-- Right braces should be on start of a new line (default value) -->
<module name="RightCurly"/>
<!-- Enforce Java-style array declaration instead of C-style -->
<module name="ArrayTypeStyle"/>
<!-- ##### Indentation / Whitespace requirements ##### -->
<!-- Require 4-space indentation (default value) -->
<module name="Indentation"/>

View File

@@ -65,12 +65,13 @@ services:
# DSpace PostgreSQL database container
dspacedb:
container_name: dspacedb
# Uses the base PostgreSQL image
image: "docker.io/postgres:${POSTGRES_VERSION:-15}"
# Uses a custom Postgres image with pgcrypto installed
image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
build:
# Must build out of subdirectory to have access to install script for pgcrypto
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
environment:
PGDATA: /pgdata
POSTGRES_DB: dspace
POSTGRES_USER: dspace
POSTGRES_PASSWORD: dspace
networks:
dspacenet:

View File

@@ -99,6 +99,20 @@
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.6.1</version>
<executions>
<execution>
<phase>validate</phase>
<goals>
<goal>maven-version</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId>
@@ -720,7 +734,7 @@
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.791</version>
<version>1.12.785</version>
</dependency>
<!-- TODO: This may need to be replaced with the "orcid-model" artifact once this ticket is resolved:
@@ -761,7 +775,7 @@
<dependency>
<groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId>
<version>5.12.0</version>
<version>5.11.1</version>
</dependency>
<!-- Email templating -->
@@ -774,7 +788,7 @@
<dependency>
<groupId>org.xmlunit</groupId>
<artifactId>xmlunit-core</artifactId>
<version>2.10.4</version>
<version>2.10.2</version>
<scope>test</scope>
</dependency>
@@ -915,7 +929,7 @@
<dependency>
<groupId>org.xhtmlrenderer</groupId>
<artifactId>flying-saucer-pdf</artifactId>
<version>9.13.3</version>
<version>9.12.0</version>
<exclusions>
<!-- Conflicts with Hibernate. Use version that is brought in via Hibernate -->
<exclusion>

View File

@@ -46,7 +46,6 @@ import org.dspace.app.util.XMLUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName;
import org.dspace.content.MetadataSchemaEnum;
@@ -54,7 +53,6 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
@@ -171,10 +169,6 @@ public class StructBuilder {
.desc("File to receive the structure map ('-' for standard out).")
.hasArg().argName("output").required().build());
options.addOption(Option.builder("p").longOpt("parent")
.desc("Parent community or handle (optional)")
.hasArg().argName("parent").required(false).build());
// Parse the command line.
CommandLineParser parser = new DefaultParser();
CommandLine line = null;
@@ -208,11 +202,6 @@ public class StructBuilder {
outputStream = new FileOutputStream(output);
}
String parentID = null;
if (line.hasOption('p')) {
parentID = line.getOptionValue('p');
}
// create a context
Context context = new Context();
@@ -225,30 +214,6 @@ public class StructBuilder {
System.exit(1);
}
// Resolve optional "parent community" ID or handle to a community
Community parent = null;
if (parentID != null) {
DSpaceObject dso = handleService.resolveToObject(context, parentID);
if (dso != null) {
if (dso.getType() == Constants.COMMUNITY) {
parent = (Community) dso;
} else {
System.out.println("The handle provided for the -p option does not resolve to a community. " +
parentID + " is an object of type: " + Constants.typeText[dso.getType()]);
System.exit(0);
}
} else {
// Not a handle, see if it is an ID
Community community = communityService.findByIdOrLegacyId(context, parentID);
if (community != null) {
parent = community;
} else {
System.out.println("The value provided for -p is not a valid community ID or handle: " + parentID);
System.exit(0);
}
}
}
// Export? Import?
if (line.hasOption('x')) { // export
exportStructure(context, outputStream);
@@ -268,7 +233,7 @@ public class StructBuilder {
}
boolean keepHandles = options.hasOption("k");
importStructure(context, inputStream, outputStream, parent, keepHandles);
importStructure(context, inputStream, outputStream, keepHandles);
inputStream.close();
outputStream.close();
@@ -285,7 +250,6 @@ public class StructBuilder {
* @param context
* @param input XML which describes the new communities and collections.
* @param output input, annotated with the new objects' identifiers.
* @param parent Community beneath which to attach this structure
* @param keepHandles true if Handles should be set from input.
* @throws IOException
* @throws ParserConfigurationException
@@ -294,7 +258,7 @@ public class StructBuilder {
* @throws SQLException
*/
static void importStructure(Context context, InputStream input,
OutputStream output, Community parent, boolean keepHandles)
OutputStream output, boolean keepHandles)
throws IOException, ParserConfigurationException, SQLException,
TransformerException, XPathExpressionException {
@@ -361,7 +325,7 @@ public class StructBuilder {
.evaluate(document, XPathConstants.NODESET);
// run the import starting with the top level communities
elements = handleCommunities(context, first, parent, keepHandles);
elements = handleCommunities(context, first, null, keepHandles);
} catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1);

View File

@@ -14,8 +14,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.DefaultParser.Builder;
import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
@@ -169,14 +167,4 @@ public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScr
}
return scopeObj;
}
@Override
protected StepResult parse(String[] args) throws ParseException {
commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args);
Builder builder = new DefaultParser().builder();
builder.setStripLeadingAndTrailingQuotes(false);
commandLine = builder.build().parse(getScriptConfiguration().getOptions(), args);
setup();
return StepResult.Continue;
}
}

View File

@@ -495,7 +495,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
// Check it has an owning collection
List<String> collections = line.get("collection");
if (collections == null || collections.isEmpty()) {
if (collections == null) {
throw new MetadataImportException(
"New items must have a 'collection' assigned in the form of a handle");
}
@@ -1143,12 +1143,12 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
}
// look up the value and authority in solr
List<AuthorityValue> byValue = authorityValueService.findByValue(schema, element, qualifier, value);
List<AuthorityValue> byValue = authorityValueService.findByValue(c, schema, element, qualifier, value);
AuthorityValue authorityValue = null;
if (byValue.isEmpty()) {
String toGenerate = fromAuthority.generateString() + value;
String field = schema + "_" + element + (StringUtils.isNotBlank(qualifier) ? "_" + qualifier : "");
authorityValue = authorityValueService.generate(toGenerate, value, field);
authorityValue = authorityValueService.generate(c, toGenerate, value, field);
dcv.setAuthority(toGenerate);
} else {
authorityValue = byValue.get(0);
@@ -1560,7 +1560,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
ContentServiceFactory.getInstance().getMetadataFieldService();
int i = reference.indexOf(":");
String mfValue = reference.substring(i + 1);
String[] mf = reference.substring(0, i).split("\\.");
String mf[] = reference.substring(0, i).split("\\.");
if (mf.length < 2) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"Bad metadata field in reference: '" + reference

View File

@@ -353,7 +353,7 @@ public class ItemExportServiceImpl implements ItemExportService {
/**
* Create the 'collections' file. List handles of all Collections which
* contain this Item. The "owning" Collection is listed first.
* contain this Item. The "owning" Collection is listed first.
*
* @param item list collections holding this Item.
* @param destDir write the file here.
@@ -364,14 +364,12 @@ public class ItemExportServiceImpl implements ItemExportService {
File outFile = new File(destDir, "collections");
if (outFile.createNewFile()) {
try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
Collection owningCollection = item.getOwningCollection();
// The owning collection is null for workspace and workflow items
if (owningCollection != null) {
out.println(owningCollection.getHandle());
}
String ownerHandle = item.getOwningCollection().getHandle();
out.println(ownerHandle);
for (Collection collection : item.getCollections()) {
if (!collection.equals(owningCollection)) {
out.println(collection.getHandle());
String collectionHandle = collection.getHandle();
if (!collectionHandle.equals(ownerHandle)) {
out.println(collectionHandle);
}
}
}

View File

@@ -23,7 +23,6 @@ import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.tika.Tika;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
@@ -335,38 +334,33 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
Optional<InputStream> optionalFileStream = Optional.empty();
Optional<InputStream> validationFileStream = Optional.empty();
try {
if (!remoteUrl) {
// manage zip via upload
optionalFileStream = handler.getFileStream(context, zipfilename);
validationFileStream = handler.getFileStream(context, zipfilename);
} else {
// manage zip via remote url
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
}
if (validationFileStream.isPresent()) {
// validate zip file
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} finally {
optionalFileStream.ifPresent(IOUtils::closeQuietly);
validationFileStream.ifPresent(IOUtils::closeQuietly);
if (!remoteUrl) {
// manage zip via upload
optionalFileStream = handler.getFileStream(context, zipfilename);
validationFileStream = handler.getFileStream(context, zipfilename);
} else {
// manage zip via remote url
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
}
if (validationFileStream.isPresent()) {
// validate zip file
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
}
/**

View File

@@ -17,7 +17,6 @@ import java.util.Optional;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
@@ -112,11 +111,7 @@ public class ItemImportCLI extends ItemImport {
// validate zip file
InputStream validationFileStream = new FileInputStream(myZipFile);
try {
validateZip(validationFileStream);
} finally {
IOUtils.closeQuietly(validationFileStream);
}
validateZip(validationFileStream);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
@@ -125,28 +120,22 @@ public class ItemImportCLI extends ItemImport {
} else {
// manage zip via remote url
Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
try {
if (optionalFileStream.isPresent()) {
// validate zip file via url
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
if (optionalFileStream.isPresent()) {
// validate zip file via url
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
} finally {
optionalFileStream.ifPresent(IOUtils::closeQuietly);
validationFileStream.ifPresent(IOUtils::closeQuietly);
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
}
}

View File

@@ -605,7 +605,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Item item = null;
String[] mf = metaKey.split("\\.");
String mf[] = metaKey.split("\\.");
if (mf.length < 2) {
throw new Exception("Bad metadata field in reference: '" + metaKey +
"' (expected syntax is schema.element[.qualifier])");
@@ -913,7 +913,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// Load any additional metadata schemas
File folder = new File(path);
File[] file = folder.listFiles(metadataFileFilter);
File file[] = folder.listFiles(metadataFileFilter);
for (int i = 0; i < file.length; i++) {
loadDublinCore(c, myitem, file[i].getAbsolutePath());
}

View File

@@ -97,7 +97,7 @@ public class ItemArchive {
//The code to search for local schema files was copied from org.dspace.app.itemimport
// .ItemImportServiceImpl.java
File[] file = dir.listFiles(new LocalSchemaFilenameFilter());
File file[] = dir.listFiles(new LocalSchemaFilenameFilter());
for (int i = 0; i < file.length; i++) {
is = new FileInputStream(file[i]);
itarch.dtomList.addAll(MetadataUtilities.loadDublinCore(XMLUtils.getDocumentBuilder(), is));

View File

@@ -7,7 +7,9 @@
*/
package org.dspace.app.mediafilter;
import java.awt.image.BufferedImage;
import java.io.InputStream;
import javax.imageio.ImageIO;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
@@ -61,20 +63,27 @@ public class BrandedPreviewJPEGFilter extends MediaFilter {
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
// read in bitstream's image
BufferedImage buf = ImageIO.read(source);
// get config params
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
int xmax = configurationService.getIntProperty("webui.preview.maxwidth");
int ymax = configurationService.getIntProperty("webui.preview.maxheight");
boolean blurring = configurationService.getBooleanProperty("webui.preview.blurring");
boolean hqscaling = configurationService.getBooleanProperty("webui.preview.hqscaling");
float xmax = (float) configurationService
.getIntProperty("webui.preview.maxwidth");
float ymax = (float) configurationService
.getIntProperty("webui.preview.maxheight");
boolean blurring = (boolean) configurationService
.getBooleanProperty("webui.preview.blurring");
boolean hqscaling = (boolean) configurationService
.getBooleanProperty("webui.preview.hqscaling");
int brandHeight = configurationService.getIntProperty("webui.preview.brand.height");
String brandFont = configurationService.getProperty("webui.preview.brand.font");
int brandFontPoint = configurationService.getIntProperty("webui.preview.brand.fontpoint");
JPEGFilter jpegFilter = new JPEGFilter();
return jpegFilter.getThumb(
currentItem, source, verbose, xmax, ymax, blurring, hqscaling, brandHeight, brandFontPoint, brandFont
);
return jpegFilter
.getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, brandHeight, brandFontPoint,
brandFont);
}
}

View File

@@ -14,7 +14,7 @@ import java.io.InputStream;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.dspace.content.Bitstream;
@@ -153,8 +153,8 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
// the CropBox is missing or empty because pdfbox will set it to the
// same size as the MediaBox if it doesn't exist. Also note that we
// only need to check the first page, since that's what we use for
// generating the thumbnail (PDPage uses a zero-based index).
PDPage pdfPage = Loader.loadPDF(f).getPage(0);
// generating the thumbnail (PDDocument uses a zero-based index).
PDPage pdfPage = PDDocument.load(f).getPage(0);
PDRectangle pdfPageMediaBox = pdfPage.getMediaBox();
PDRectangle pdfPageCropBox = pdfPage.getCropBox();

View File

@@ -8,32 +8,19 @@
package org.dspace.app.mediafilter;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.Transparency;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.awt.image.BufferedImageOp;
import java.awt.image.ConvolveOp;
import java.awt.image.Kernel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.imageio.ImageIO;
import com.drew.imaging.ImageMetadataReader;
import com.drew.imaging.ImageProcessingException;
import com.drew.metadata.Metadata;
import com.drew.metadata.MetadataException;
import com.drew.metadata.exif.ExifIFD0Directory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -46,8 +33,6 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* @author Jason Sherman jsherman@usao.edu
*/
public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats {
private static final Logger log = LogManager.getLogger(JPEGFilter.class);
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
@@ -77,115 +62,6 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
return "Generated Thumbnail";
}
/**
* Gets the rotation angle from image's metadata using ImageReader.
* This method consumes the InputStream, so you need to be careful to don't reuse the same InputStream after
* computing the rotation angle.
*
* @param buf InputStream of the image file
* @return Rotation angle in degrees (0, 90, 180, or 270)
*/
public static int getImageRotationUsingImageReader(InputStream buf) {
try {
Metadata metadata = ImageMetadataReader.readMetadata(buf);
ExifIFD0Directory directory = metadata.getFirstDirectoryOfType(ExifIFD0Directory.class);
if (directory != null && directory.containsTag(ExifIFD0Directory.TAG_ORIENTATION)) {
return convertRotationToDegrees(directory.getInt(ExifIFD0Directory.TAG_ORIENTATION));
}
} catch (MetadataException | ImageProcessingException | IOException e) {
log.error("Error reading image metadata", e);
}
return 0;
}
public static int convertRotationToDegrees(int valueNode) {
// Common orientation values:
// 1 = Normal (0°)
// 6 = Rotated 90° CW
// 3 = Rotated 180°
// 8 = Rotated 270° CW
switch (valueNode) {
case 6:
return 90;
case 3:
return 180;
case 8:
return 270;
default:
return 0;
}
}
/**
* Rotates an image by the specified angle
*
* @param image The original image
* @param angle The rotation angle in degrees
* @return Rotated image
*/
public static BufferedImage rotateImage(BufferedImage image, int angle) {
if (angle == 0) {
return image;
}
double radians = Math.toRadians(angle);
double sin = Math.abs(Math.sin(radians));
double cos = Math.abs(Math.cos(radians));
int newWidth = (int) Math.round(image.getWidth() * cos + image.getHeight() * sin);
int newHeight = (int) Math.round(image.getWidth() * sin + image.getHeight() * cos);
BufferedImage rotated = new BufferedImage(newWidth, newHeight, image.getType());
Graphics2D g2d = rotated.createGraphics();
AffineTransform at = new AffineTransform();
at.translate(newWidth / 2, newHeight / 2);
at.rotate(radians);
at.translate(-image.getWidth() / 2, -image.getHeight() / 2);
g2d.setTransform(at);
g2d.drawImage(image, 0, 0, null);
g2d.dispose();
return rotated;
}
/**
* Calculates scaled dimension while maintaining aspect ratio
*
* @param imgSize Original image dimensions
* @param boundary Maximum allowed dimensions
* @return New dimensions that fit within boundary while preserving aspect ratio
*/
private Dimension getScaledDimension(Dimension imgSize, Dimension boundary) {
int originalWidth = imgSize.width;
int originalHeight = imgSize.height;
int boundWidth = boundary.width;
int boundHeight = boundary.height;
int newWidth = originalWidth;
int newHeight = originalHeight;
// First check if we need to scale width
if (originalWidth > boundWidth) {
// Scale width to fit
newWidth = boundWidth;
// Scale height to maintain aspect ratio
newHeight = (newWidth * originalHeight) / originalWidth;
}
// Then check if we need to scale even with the new height
if (newHeight > boundHeight) {
// Scale height to fit instead
newHeight = boundHeight;
newWidth = (newHeight * originalWidth) / originalHeight;
}
return new Dimension(newWidth, newHeight);
}
/**
* @param currentItem item
* @param source source input stream
@@ -196,65 +72,10 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
return getThumb(currentItem, source, verbose);
}
// read in bitstream's image
BufferedImage buf = ImageIO.read(source);
/**
 * Create a thumbnail for the given source bitstream, using the maximum
 * width/height, blurring and high-quality-scaling settings from the
 * DSpace configuration. Branding is disabled for this entry point.
 *
 * @param currentItem item owning the bitstream
 * @param source      source image input stream
 * @param verbose     if true, print progress details to STDOUT
 * @return an input stream on the generated JPEG thumbnail
 * @throws Exception if the image cannot be read or scaled
 */
public InputStream getThumb(Item currentItem, InputStream source, boolean verbose)
    throws Exception {
    // Read thumbnail generation parameters from the DSpace configuration.
    final ConfigurationService configurationService
        = DSpaceServicesFactory.getInstance().getConfigurationService();
    int xmax = configurationService
        .getIntProperty("thumbnail.maxwidth");
    int ymax = configurationService
        .getIntProperty("thumbnail.maxheight");
    // getBooleanProperty already returns a primitive boolean; no cast needed.
    boolean blurring = configurationService
        .getBooleanProperty("thumbnail.blurring");
    boolean hqscaling = configurationService
        .getBooleanProperty("thumbnail.hqscaling");
    // brandHeight/brandFontPoint = 0 and a null font disable branding.
    return getThumb(currentItem, source, verbose, xmax, ymax, blurring, hqscaling, 0, 0, null);
}
/**
 * Create a thumbnail with explicit sizing, scaling and branding parameters.
 * <p>
 * The source stream can only be consumed once, but two passes over the image
 * bytes are needed (rotation detection, then decoding), so the stream is
 * spooled to a temporary file first.
 *
 * @param currentItem    item owning the bitstream
 * @param source         source image input stream
 * @param verbose        if true, print progress details to STDOUT
 * @param xmax           maximum thumbnail width
 * @param ymax           maximum thumbnail height
 * @param blurring       whether to blur before scaling (reduces moire)
 * @param hqscaling      whether to use the high-quality (slower) scaler
 * @param brandHeight    height of the brand strip, 0 to disable branding
 * @param brandFontPoint font point size for the brand text
 * @param brandFont      font name for the brand text (may be null when branding is off)
 * @return an input stream on the generated JPEG thumbnail
 * @throws Exception if the image cannot be spooled, read or scaled
 */
protected InputStream getThumb(
    Item currentItem,
    InputStream source,
    boolean verbose,
    int xmax,
    int ymax,
    boolean blurring,
    boolean hqscaling,
    int brandHeight,
    int brandFontPoint,
    String brandFont
) throws Exception {
    File tempFile = File.createTempFile("temp", ".tmp");
    // Safety net in case the JVM exits before the finally block runs.
    tempFile.deleteOnExit();
    try {
        // Spool the source stream to the temp file so it can be read twice.
        try (FileOutputStream fos = new FileOutputStream(tempFile)) {
            byte[] buffer = new byte[4096];
            int len;
            while ((len = source.read(buffer)) != -1) {
                fos.write(buffer, 0, len);
            }
        }
        // First pass: detect the rotation to apply (presumably from image
        // orientation metadata -- helper not shown here, confirm).
        int rotation;
        try (FileInputStream fis = new FileInputStream(tempFile)) {
            rotation = getImageRotationUsingImageReader(fis);
        }
        // Second pass: decode the bitstream's image and build the thumbnail.
        try (FileInputStream fis = new FileInputStream(tempFile)) {
            BufferedImage buf = ImageIO.read(fis);
            return getThumbDim(
                currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, brandHeight, brandFontPoint,
                rotation, brandFont
            );
        }
    } finally {
        // Remove the spool file promptly instead of waiting for JVM exit.
        tempFile.delete();
    }
}
public InputStream getThumb(Item currentItem, BufferedImage buf, boolean verbose)
@@ -262,28 +83,25 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// get config params
final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
int xmax = configurationService
float xmax = (float) configurationService
.getIntProperty("thumbnail.maxwidth");
int ymax = configurationService
float ymax = (float) configurationService
.getIntProperty("thumbnail.maxheight");
boolean blurring = (boolean) configurationService
.getBooleanProperty("thumbnail.blurring");
boolean hqscaling = (boolean) configurationService
.getBooleanProperty("thumbnail.hqscaling");
return getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, 0, 0, 0, null);
return getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, 0, 0, null);
}
public InputStream getThumbDim(Item currentItem, BufferedImage buf, boolean verbose, int xmax, int ymax,
public InputStream getThumbDim(Item currentItem, BufferedImage buf, boolean verbose, float xmax, float ymax,
boolean blurring, boolean hqscaling, int brandHeight, int brandFontPoint,
int rotation, String brandFont)
String brandFont)
throws Exception {
// Rotate the image if needed
BufferedImage correctedImage = rotateImage(buf, rotation);
int xsize = correctedImage.getWidth();
int ysize = correctedImage.getHeight();
// now get the image dimensions
float xsize = (float) buf.getWidth(null);
float ysize = (float) buf.getHeight(null);
// if verbose flag is set, print out dimensions
// to STDOUT
@@ -291,63 +109,86 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
System.out.println("original size: " + xsize + "," + ysize);
}
// Calculate new dimensions while maintaining aspect ratio
Dimension newDimension = getScaledDimension(
new Dimension(xsize, ysize),
new Dimension(xmax, ymax)
);
// scale by x first if needed
if (xsize > xmax) {
// calculate scaling factor so that xsize * scale = new size (max)
float scale_factor = xmax / xsize;
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
System.out.println("x scale factor: " + scale_factor);
}
// now reduce x size
// and y size
xsize = xsize * scale_factor;
ysize = ysize * scale_factor;
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
System.out.println("size after fitting to maximum width: " + xsize + "," + ysize);
}
}
// scale by y if needed
if (ysize > ymax) {
float scale_factor = ymax / ysize;
// now reduce x size
// and y size
xsize = xsize * scale_factor;
ysize = ysize * scale_factor;
}
// if verbose flag is set, print details to STDOUT
if (verbose) {
System.out.println("size after fitting to maximum height: " + newDimension.width + ", "
+ newDimension.height);
System.out.println("size after fitting to maximum height: " + xsize + ", "
+ ysize);
}
xsize = newDimension.width;
ysize = newDimension.height;
// create an image buffer for the thumbnail with the new xsize, ysize
BufferedImage thumbnail = new BufferedImage(xsize, ysize, BufferedImage.TYPE_INT_RGB);
BufferedImage thumbnail = new BufferedImage((int) xsize, (int) ysize,
BufferedImage.TYPE_INT_RGB);
// Use blurring if selected in config.
// a little blur before scaling does wonders for keeping moire in check.
if (blurring) {
// send the buffered image off to get blurred.
correctedImage = getBlurredInstance(correctedImage);
buf = getBlurredInstance((BufferedImage) buf);
}
// Use high quality scaling method if selected in config.
// this has a definite performance penalty.
if (hqscaling) {
// send the buffered image off to get an HQ downscale.
correctedImage = getScaledInstance(correctedImage, xsize, ysize,
RenderingHints.VALUE_INTERPOLATION_BICUBIC, true);
buf = getScaledInstance((BufferedImage) buf, (int) xsize, (int) ysize,
(Object) RenderingHints.VALUE_INTERPOLATION_BICUBIC, (boolean) true);
}
// now render the image into the thumbnail buffer
Graphics2D g2d = thumbnail.createGraphics();
g2d.drawImage(correctedImage, 0, 0, xsize, ysize, null);
g2d.drawImage(buf, 0, 0, (int) xsize, (int) ysize, null);
if (brandHeight != 0) {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
Brand brand = new Brand(xsize, brandHeight, new Font(brandFont, Font.PLAIN, brandFontPoint), 5);
Brand brand = new Brand((int) xsize, brandHeight, new Font(brandFont, Font.PLAIN, brandFontPoint), 5);
BufferedImage brandImage = brand.create(configurationService.getProperty("webui.preview.brand"),
configurationService.getProperty("webui.preview.brand.abbrev"),
currentItem == null ? "" : "hdl:" + currentItem.getHandle());
g2d.drawImage(brandImage, 0, ysize, xsize, 20, null);
g2d.drawImage(brandImage, (int) 0, (int) ysize, (int) xsize, (int) 20, null);
}
ByteArrayInputStream bais;
// now create an input stream for the thumbnail buffer and return it
try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
ImageIO.write(thumbnail, "jpeg", baos);
// now get the array
bais = new ByteArrayInputStream(baos.toByteArray());
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ImageIO.write(thumbnail, "jpeg", baos);
// now get the array
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
return bais; // hope this gets written out before its garbage collected!
}

View File

@@ -11,8 +11,6 @@ import java.awt.image.BufferedImage;
import java.io.InputStream;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.io.RandomAccessReadBuffer;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.rendering.PDFRenderer;
@@ -73,7 +71,7 @@ public class PDFBoxThumbnail extends MediaFilter {
BufferedImage buf;
// Render the page image.
try ( PDDocument doc = Loader.loadPDF(new RandomAccessReadBuffer(source)); ) {
try ( PDDocument doc = PDDocument.load(source); ) {
PDFRenderer renderer = new PDFRenderer(doc);
buf = renderer.renderImage(0);
} catch (InvalidPasswordException ex) {
@@ -83,7 +81,6 @@ public class PDFBoxThumbnail extends MediaFilter {
// Generate thumbnail derivative and return as IO stream.
JPEGFilter jpegFilter = new JPEGFilter();
return jpegFilter.getThumb(currentItem, buf, verbose);
}
}

View File

@@ -224,7 +224,7 @@ public class Packager {
} else {
//otherwise, display list of valid packager types
System.out.println("\nAvailable Submission Package (SIP) types:");
String[] pn = pluginService
String pn[] = pluginService
.getAllPluginNames(PackageIngester.class);
for (int i = 0; i < pn.length; ++i) {
System.out.println(" " + pn[i]);
@@ -274,7 +274,7 @@ public class Packager {
// process
pkgParams.setRecursiveModeEnabled(true);
}
String[] files = line.getArgs();
String files[] = line.getArgs();
if (files.length > 0) {
sourceFile = files[0];
}
@@ -282,9 +282,9 @@ public class Packager {
myPackager.submit = false;
}
if (line.hasOption('o')) {
String[] popt = line.getOptionValues('o');
String popt[] = line.getOptionValues('o');
for (int i = 0; i < popt.length; ++i) {
String[] pair = popt[i].split("\\=", 2);
String pair[] = popt[i].split("\\=", 2);
if (pair.length == 2) {
pkgParams.addProperty(pair[0].trim(), pair[1].trim());
} else if (pair.length == 1) {
@@ -383,7 +383,7 @@ public class Packager {
}
// validate each parent arg (if any)
DSpaceObject[] parentObjs = null;
DSpaceObject parentObjs[] = null;
if (parents != null) {
System.out.println("Destination parents:");
@@ -461,7 +461,7 @@ public class Packager {
* @throws PackageException if packaging error
*/
protected void ingest(Context context, PackageIngester sip, PackageParameters pkgParams, String sourceFile,
DSpaceObject[] parentObjs)
DSpaceObject parentObjs[])
throws IOException, SQLException, FileNotFoundException, AuthorizeException, CrosswalkException,
PackageException {
// make sure we have an input file

View File

@@ -132,7 +132,7 @@ public class MetadataExposureServiceImpl implements MetadataExposureService {
if (key.startsWith(CONFIG_PREFIX)) {
if (configurationService.getBooleanProperty(key, true)) {
String mdField = key.substring(CONFIG_PREFIX.length());
String[] segment = mdField.split("\\.", 3);
String segment[] = mdField.split("\\.", 3);
// got schema.element.qualifier
if (segment.length == 3) {

View File

@@ -113,7 +113,7 @@ public class SyndicationFeed {
configurationService.getProperty("webui.feed.item.date", defaultDateField);
// metadata field for Item description in entry:
private static final String[] descriptionFields =
private static final String descriptionFields[] =
DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty("webui.feed.item.description", defaultDescriptionFields);

View File

@@ -535,7 +535,7 @@ public class LDAPAuthentication implements AuthenticationMethod {
resultDN = (sr.getName() + "," + ldap_search_context);
}
String[] attlist = {ldap_email_field, ldap_givenname_field,
String attlist[] = {ldap_email_field, ldap_givenname_field,
ldap_surname_field, ldap_phone_field, ldap_group_field};
Attributes atts = sr.getAttributes();
Attribute att;
@@ -743,7 +743,7 @@ public class LDAPAuthentication implements AuthenticationMethod {
// groupmap contains the mapping of LDAP groups to DSpace groups
// outer loop with the DSpace groups
while (groupMap != null) {
String[] t = groupMap.split(":");
String t[] = groupMap.split(":");
String ldapSearchString = t[0];
String dspaceGroupName = t[1];

View File

@@ -20,6 +20,8 @@ import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.authority.SolrAuthority;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -34,7 +36,7 @@ public class AuthorityValueServiceImpl implements AuthorityValueService {
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValueServiceImpl.class);
@Autowired
@Autowired(required = true)
protected AuthorityTypes authorityTypes;
protected AuthorityValueServiceImpl() {
@@ -42,7 +44,7 @@ public class AuthorityValueServiceImpl implements AuthorityValueService {
}
@Override
public AuthorityValue generate(String authorityKey, String content, String field) {
public AuthorityValue generate(Context context, String authorityKey, String content, String field) {
AuthorityValue nextValue = null;
nextValue = generateRaw(authorityKey, content, field);
@@ -53,7 +55,7 @@ public class AuthorityValueServiceImpl implements AuthorityValueService {
if (StringUtils.isBlank(authorityKey)) {
// An existing metadata without authority is being indexed
// If there is an exact match in the index, reuse it before adding a new one.
List<AuthorityValue> byValue = findByExactValue(field, content);
List<AuthorityValue> byValue = findByExactValue(context, field, content);
if (byValue != null && !byValue.isEmpty()) {
authorityKey = byValue.get(0).getId();
} else {
@@ -116,70 +118,71 @@ public class AuthorityValueServiceImpl implements AuthorityValueService {
/**
* Item.ANY does not work here.
*
* @param context Context
* @param authorityID authority id
* @return AuthorityValue
*/
@Override
public AuthorityValue findByUID(String authorityID) {
public AuthorityValue findByUID(Context context, String authorityID) {
//Ensure that if we use the full identifier to match on
String queryString = "id:\"" + authorityID + "\"";
List<AuthorityValue> findings = find(queryString);
List<AuthorityValue> findings = find(context, queryString);
return findings.size() > 0 ? findings.get(0) : null;
}
@Override
public List<AuthorityValue> findByValue(String field, String value) {
public List<AuthorityValue> findByValue(Context context, String field, String value) {
String queryString = "value:" + value + " AND field:" + field;
return find(queryString);
return find(context, queryString);
}
@Override
public AuthorityValue findByOrcidID(String orcid_id) {
public AuthorityValue findByOrcidID(Context context, String orcid_id) {
String queryString = "orcid_id:" + orcid_id;
List<AuthorityValue> findings = find(queryString);
List<AuthorityValue> findings = find(context, queryString);
return findings.size() > 0 ? findings.get(0) : null;
}
@Override
public List<AuthorityValue> findByExactValue(String field, String value) {
public List<AuthorityValue> findByExactValue(Context context, String field, String value) {
String queryString = "value:\"" + value + "\" AND field:" + field;
return find(queryString);
return find(context, queryString);
}
@Override
public List<AuthorityValue> findByValue(String schema, String element, String qualifier,
public List<AuthorityValue> findByValue(Context context, String schema, String element, String qualifier,
String value) {
String field = fieldParameter(schema, element, qualifier);
return findByValue(field, value);
return findByValue(context, field, value);
}
@Override
public List<AuthorityValue> findByName(String schema, String element, String qualifier,
public List<AuthorityValue> findByName(Context context, String schema, String element, String qualifier,
String name) {
String field = fieldParameter(schema, element, qualifier);
String queryString = "first_name:" + name + " OR last_name:" + name + " OR name_variant:" + name + " AND " +
"field:" + field;
return find(queryString);
return find(context, queryString);
}
@Override
public List<AuthorityValue> findByAuthorityMetadata(String schema, String element,
public List<AuthorityValue> findByAuthorityMetadata(Context context, String schema, String element,
String qualifier, String value) {
String field = fieldParameter(schema, element, qualifier);
String queryString = "all_Labels:" + value + " AND field:" + field;
return find(queryString);
return find(context, queryString);
}
@Override
public List<AuthorityValue> findOrcidHolders() {
public List<AuthorityValue> findOrcidHolders(Context context) {
String queryString = "orcid_id:*";
return find(queryString);
return find(context, queryString);
}
@Override
public List<AuthorityValue> findAll() {
public List<AuthorityValue> findAll(Context context) {
String queryString = "*:*";
return find(queryString);
return find(context, queryString);
}
@Override
@@ -201,7 +204,7 @@ public class AuthorityValueServiceImpl implements AuthorityValueService {
return fromAuthority;
}
protected List<AuthorityValue> find(String queryString) {
protected List<AuthorityValue> find(Context context, String queryString) {
List<AuthorityValue> findings = new ArrayList<AuthorityValue>();
try {
SolrQuery solrQuery = new SolrQuery();
@@ -217,7 +220,8 @@ public class AuthorityValueServiceImpl implements AuthorityValueService {
}
}
} catch (Exception e) {
log.error("Error while retrieving AuthorityValue from solr. query: " + queryString, e);
log.error(LogHelper.getHeader(context, "Error while retrieving AuthorityValue from solr",
"query: " + queryString), e);
}
return findings;

View File

@@ -133,11 +133,11 @@ public class UpdateAuthorities {
if (selectedIDs != null && !selectedIDs.isEmpty()) {
authorities = new ArrayList<>();
for (String selectedID : selectedIDs) {
AuthorityValue byUID = authorityValueService.findByUID(selectedID);
AuthorityValue byUID = authorityValueService.findByUID(context, selectedID);
authorities.add(byUID);
}
} else {
authorities = authorityValueService.findAll();
authorities = authorityValueService.findAll(context);
}
if (authorities != null) {

View File

@@ -148,12 +148,12 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
!metadataAuthorityKey.startsWith(AuthorityValueService.GENERATE)) {
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly
// necessary here but it prevents exceptions in solr
AuthorityValue value = authorityValueService.findByUID(metadataAuthorityKey);
AuthorityValue value = authorityValueService.findByUID(context, metadataAuthorityKey);
if (value != null) {
return value;
}
}
return authorityValueService.generate(metadataAuthorityKey,
return authorityValueService.generate(context, metadataAuthorityKey,
metadataContent, metadataField.replaceAll("\\.", "_"));
}

View File

@@ -11,6 +11,7 @@ import java.util.List;
import org.apache.solr.common.SolrDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.core.Context;
/**
* This service contains all methods for using authority values
@@ -24,125 +25,32 @@ public interface AuthorityValueService {
public static final String SPLIT = "::";
public static final String GENERATE = "will be generated" + SPLIT;
/**
* Generates an {@link AuthorityValue} based on the given parameters.
*
* @param authorityKey the authority key to be assigned to the generated authority value
* @param content the content of the generated authority value
* @param field the field of the generated authority value
* @return the generated {@link AuthorityValue}
*/
public AuthorityValue generate(String authorityKey, String content, String field);
public AuthorityValue generate(Context context, String authorityKey, String content, String field);
/**
* Updates an AuthorityValue.
*
* @param value the AuthorityValue to be updated
* @return the updated AuthorityValue
*/
public AuthorityValue update(AuthorityValue value);
/**
* Finds an AuthorityValue based on the provided authorityID.
*
* @param authorityID the authority ID used to search for the AuthorityValue
* @return the found AuthorityValue, or null if no match is found
*/
public AuthorityValue findByUID(String authorityID);
public AuthorityValue findByUID(Context context, String authorityID);
/**
* Finds AuthorityValues in the given context based on field and value.
*
* @param field the field to search for AuthorityValues
* @param value the value to search for AuthorityValues
* @return a list of found AuthorityValues matching the given field and value, or an empty list if no match is found
*/
public List<AuthorityValue> findByValue(String field, String value);
public List<AuthorityValue> findByValue(Context context, String field, String value);
/**
* Finds an {@link AuthorityValue} based on the provided ORCID ID.
*
* @param orcid_id the ORCID ID used to search for the AuthorityValue
* @return the found AuthorityValue, or null if no match is found
*/
public AuthorityValue findByOrcidID(String orcid_id);
public AuthorityValue findByOrcidID(Context context, String orcid_id);
/**
* Finds {@link AuthorityValue}s based on the provided metadata schema, element, qualifier, and name.
*
* @param schema the schema of the AuthorityValue
* @param element the element of the AuthorityValue
* @param qualifier the qualifier of the AuthorityValue
* @param name the name of the AuthorityValue
* @return a list of found AuthorityValues matching the given schema, element, qualifier, and name,
* or an empty list if no match is found
*/
public List<AuthorityValue> findByName(String schema, String element, String qualifier,
public List<AuthorityValue> findByName(Context context, String schema, String element, String qualifier,
String name);
/**
* Finds {@link AuthorityValue}s based on the provided metadata schema, element, qualifier, and value.
*
* @param schema the schema of the AuthorityValue
* @param element the element of the AuthorityValue
* @param qualifier the qualifier of the AuthorityValue
* @param value the value of the AuthorityValue
* @return a list of found AuthorityValues matching the given schema, element, qualifier, and value,
* or an empty list if no match is found
*/
public List<AuthorityValue> findByAuthorityMetadata(String schema, String element,
public List<AuthorityValue> findByAuthorityMetadata(Context context, String schema, String element,
String qualifier, String value);
/**
* Finds {@link AuthorityValue}s in the given context based on the exact field and value.
*
* @param field the field to search for AuthorityValues
* @param value the value to search for AuthorityValues
* @return a list of found AuthorityValues matching the given field and value,
* or an empty list if no match is found
*/
public List<AuthorityValue> findByExactValue(String field, String value);
public List<AuthorityValue> findByExactValue(Context context, String field, String value);
/**
* Finds {@link AuthorityValue}s based on the provided metadata schema, element, qualifier, and value.
*
* @param schema the schema of the AuthorityValue
* @param element the element of the AuthorityValue
* @param qualifier the qualifier of the AuthorityValue
* @param value the value of the AuthorityValue
* @return a list of found AuthorityValues matching the given schema, element, qualifier, and value,
* or an empty list if no match is found
*/
public List<AuthorityValue> findByValue(String schema, String element, String qualifier,
public List<AuthorityValue> findByValue(Context context, String schema, String element, String qualifier,
String value);
/**
* Finds AuthorityValues that are ORCID person authority values.
*
* @return a list of AuthorityValues or an empty list if no matching values are found
*/
public List<AuthorityValue> findOrcidHolders();
public List<AuthorityValue> findOrcidHolders(Context context);
/**
* Retrieves all AuthorityValues from Solr.
*
* @return A list of all AuthorityValues.
*/
public List<AuthorityValue> findAll();
public List<AuthorityValue> findAll(Context context);
/**
* Converts a SolrDocument into an AuthorityValue object.
*
* @param solrDocument the SolrDocument to convert
* @return the converted AuthorityValue object
*/
public AuthorityValue fromSolr(SolrDocument solrDocument);
/**
* Retrieves the type of authority value based on the provided metadata string.
*
* @param metadataString the metadata string used to determine the authority value type
* @return the {@link AuthorityValue} representing the type of authority value, or null if no match is found
*/
public AuthorityValue getAuthorityValueType(String metadataString);
}

View File

@@ -16,7 +16,6 @@ import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
@@ -49,7 +48,6 @@ import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.ConfigurationService;
import org.dspace.workflow.WorkflowItemService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -86,8 +84,6 @@ public class AuthorizeServiceImpl implements AuthorizeService {
protected WorkflowItemService workflowItemService;
@Autowired(required = true)
private SearchService searchService;
@Autowired(required = true)
private ConfigurationService configurationService;
protected AuthorizeServiceImpl() {
@@ -512,26 +508,17 @@ public class AuthorizeServiceImpl implements AuthorizeService {
return resourcePolicyService.find(c, o, actionID);
}
@Override
public void inheritPolicies(Context c, DSpaceObject src, DSpaceObject dest)
throws SQLException, AuthorizeException {
inheritPolicies(c, src, dest, false);
}
@Override
public void inheritPolicies(Context c, DSpaceObject src,
DSpaceObject dest, boolean includeCustom) throws SQLException, AuthorizeException {
DSpaceObject dest) throws SQLException, AuthorizeException {
// find all policies for the source object
List<ResourcePolicy> policies = getPolicies(c, src);
// Only inherit non-ADMIN policies (since ADMIN policies are automatically inherited)
// and non-custom policies (usually applied manually?) UNLESS specified otherwise with includCustom
// (for example, item.addBundle() will inherit custom policies to enforce access conditions)
//Only inherit non-ADMIN policies (since ADMIN policies are automatically inherited)
//and non-custom policies as these are manually applied when appropriate
List<ResourcePolicy> nonAdminPolicies = new ArrayList<>();
for (ResourcePolicy rp : policies) {
if (rp.getAction() != Constants.ADMIN && (!StringUtils.equals(rp.getRpType(), ResourcePolicy.TYPE_CUSTOM)
|| (includeCustom && StringUtils.equals(rp.getRpType(), ResourcePolicy.TYPE_CUSTOM)
&& isNotAlreadyACustomRPOfThisTypeOnDSO(c, dest)))) {
if (rp.getAction() != Constants.ADMIN && !StringUtils.equals(rp.getRpType(), ResourcePolicy.TYPE_CUSTOM)) {
nonAdminPolicies.add(rp);
}
}
@@ -957,100 +944,4 @@ public class AuthorizeServiceImpl implements AuthorizeService {
return query + " AND ";
}
}
/**
* Add the default policies, which have not been already added to the given DSpace object
*
* @param context The relevant DSpace Context.
* @param dso The DSpace Object to add policies to
* @param defaultCollectionPolicies list of policies
* @throws SQLException An exception that provides information on a database access error or other errors.
* @throws AuthorizeException Exception indicating the current user of the context does not have permission
* to perform a particular action.
*/
@Override
public void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso,
List<ResourcePolicy> defaultCollectionPolicies) throws SQLException, AuthorizeException {
boolean appendMode = configurationService
.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode", false);
for (ResourcePolicy defaultPolicy : defaultCollectionPolicies) {
if (!isAnIdenticalPolicyAlreadyInPlace(context, dso, defaultPolicy.getGroup(), Constants.READ,
defaultPolicy.getID()) &&
(!appendMode && isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso) ||
appendMode && shouldBeAppended(context, dso, defaultPolicy))) {
ResourcePolicy newPolicy = resourcePolicyService.clone(context, defaultPolicy);
newPolicy.setdSpaceObject(dso);
newPolicy.setAction(Constants.READ);
newPolicy.setRpType(ResourcePolicy.TYPE_INHERITED);
resourcePolicyService.update(context, newPolicy);
}
}
}
/**
* Add a list of custom policies if there are already NO custom policies in place
*
*/
@Override
public void addCustomPoliciesNotInPlace(Context context, DSpaceObject dso, List<ResourcePolicy> customPolicies)
throws SQLException, AuthorizeException {
boolean customPoliciesAlreadyInPlace =
findPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM).size() > 0;
if (!customPoliciesAlreadyInPlace) {
addPolicies(context, customPolicies, dso);
}
}
/**
* Check whether or not there is already an RP on the given dso, which has actionId={@link Constants.READ} and
* resourceTypeId={@link ResourcePolicy.TYPE_CUSTOM}
*
* @param context DSpace context
* @param dso DSpace object to check for custom read RP
* @return True if there is no RP on the item with custom read RP, otherwise false
* @throws SQLException If something goes wrong retrieving the RP on the DSO
*/
private boolean isNotAlreadyACustomRPOfThisTypeOnDSO(Context context, DSpaceObject dso) throws SQLException {
return isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso, Constants.READ);
}
private boolean isNotAlreadyACustomRPOfThisTypeOnDSO(Context context, DSpaceObject dso, int action)
throws SQLException {
List<ResourcePolicy> rps = resourcePolicyService.find(context, dso, action);
for (ResourcePolicy rp : rps) {
if (rp.getRpType() != null && rp.getRpType().equals(ResourcePolicy.TYPE_CUSTOM)) {
return false;
}
}
return true;
}
/**
* Check if the provided default policy should be appended or not to the final
* item. If an item has at least one custom READ policy any anonymous READ
* policy with empty start/end date should be skipped
*
* @param context DSpace context
* @param dso DSpace object to check for custom read RP
* @param defaultPolicy The policy to check
* @return
* @throws SQLException If something goes wrong retrieving the RP on the DSO
*/
private boolean shouldBeAppended(Context context, DSpaceObject dso, ResourcePolicy defaultPolicy)
throws SQLException {
boolean hasCustomPolicy = resourcePolicyService.find(context, dso, Constants.READ)
.stream()
.filter(rp -> (Objects.nonNull(rp.getRpType()) &&
Objects.equals(rp.getRpType(), ResourcePolicy.TYPE_CUSTOM)))
.findFirst()
.isPresent();
boolean isAnonymousGroup = Objects.nonNull(defaultPolicy.getGroup())
&& StringUtils.equals(defaultPolicy.getGroup().getName(), Group.ANONYMOUS);
boolean datesAreNull = Objects.isNull(defaultPolicy.getStartDate())
&& Objects.isNull(defaultPolicy.getEndDate());
return !(hasCustomPolicy && isAnonymousGroup && datesAreNull);
}
}

View File

@@ -322,19 +322,6 @@ public interface AuthorizeService {
*/
public List<ResourcePolicy> getPoliciesActionFilterExceptRpType(Context c, DSpaceObject o, int actionID,
String rpType) throws SQLException;
/**
* Add policies to an object to match those from a previous object
*
* @param c context
* @param src source of policies
* @param dest destination of inherited policies
* @param includeCustom whether TYPE_CUSTOM policies should be inherited
* @throws SQLException if there's a database problem
* @throws AuthorizeException if the current user is not authorized to add these policies
*/
public void inheritPolicies(Context c, DSpaceObject src, DSpaceObject dest, boolean includeCustom)
throws SQLException, AuthorizeException;
/**
* Add policies to an object to match those from a previous object
*
@@ -618,10 +605,4 @@ public interface AuthorizeService {
public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest)
throws SQLException, AuthorizeException;
public void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso,
List<ResourcePolicy> defaultCollectionPolicies) throws SQLException, AuthorizeException;
public void addCustomPoliciesNotInPlace(Context context, DSpaceObject dso,
List<ResourcePolicy> defaultCollectionPolicies) throws SQLException, AuthorizeException;
}

View File

@@ -131,7 +131,7 @@ public final class CheckerCommand {
collector.collect(context, info);
}
context.commit();
context.uncacheEntity(bitstream);
bitstream = dispatcher.next();
}
}

View File

@@ -56,8 +56,8 @@ public class MostRecentChecksumDAOImpl extends AbstractHibernateDAO<MostRecentCh
criteriaQuery.where(criteriaBuilder.and(
criteriaBuilder.equal(mostRecentChecksumRoot.get(MostRecentChecksum_.toBeProcessed), false),
criteriaBuilder
.lessThanOrEqualTo(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate),
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate)
.lessThanOrEqualTo(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate),
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate)
)
);
List<Order> orderList = new LinkedList<>();

View File

@@ -49,7 +49,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
* translate support-level ID to string. MUST keep this table in sync
* with support level definitions above.
*/
protected final String[] supportLevelText =
protected final String supportLevelText[] =
{"UNKNOWN", "KNOWN", "SUPPORTED"};

View File

@@ -124,11 +124,13 @@ public class EntityTypeServiceImpl implements EntityTypeService {
@Override
public List<String> getSubmitAuthorizedTypes(Context context)
throws SQLException, SolrServerException, IOException {
List<String> types = new ArrayList<>();
StringBuilder query = null;
EPerson currentUser = context.getCurrentUser();
if (!authorizeService.isAdmin(context)) {
EPerson currentUser = context.getCurrentUser();
String userId = "";
if (currentUser != null) {
String userId = currentUser.getID().toString();
userId = currentUser.getID().toString();
query = new StringBuilder();
query.append("submit:(e").append(userId);
}
@@ -143,10 +145,7 @@ public class EntityTypeServiceImpl implements EntityTypeService {
}
query.append(group.getID());
}
if (query != null) {
query.append(")");
}
query.append(")");
}
SolrQuery sQuery = new SolrQuery("*:*");
@@ -161,8 +160,6 @@ public class EntityTypeServiceImpl implements EntityTypeService {
sQuery.setFacetSort(FacetParams.FACET_SORT_INDEX);
QueryResponse qResp = solrSearchCore.getSolr().query(sQuery, solrSearchCore.REQUEST_METHOD);
FacetField facetField = qResp.getFacetField("search.entitytype");
List<String> types = new ArrayList<>();
if (Objects.nonNull(facetField)) {
for (Count c : facetField.getValues()) {
types.add(c.getName());

View File

@@ -480,7 +480,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// now add authorization policies from owning item
// hmm, not very "multiple-inclusion" friendly
authorizeService.inheritPolicies(context, item, bundle, true);
authorizeService.inheritPolicies(context, item, bundle);
// Add the bundle to in-memory list
item.addBundle(bundle);
@@ -1046,8 +1046,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// if come from InstallItem: remove all submission/workflow policies
authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION);
authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_WORKFLOW);
authorizeService.addCustomPoliciesNotInPlace(context, mybundle, defaultItemPolicies);
authorizeService.addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies);
addCustomPoliciesNotInPlace(context, mybundle, defaultItemPolicies);
addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies);
for (Bitstream bitstream : mybundle.getBitstreams()) {
// If collection has default READ policies, remove the bundle's READ policies.
@@ -1093,8 +1093,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
throws SQLException, AuthorizeException {
authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION);
authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW);
authorizeService.addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies);
authorizeService.addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies);
addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies);
addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies);
}
@Override
@@ -1132,7 +1132,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
authorizeService.removeAllPoliciesByDSOAndType(context, item, ResourcePolicy.TYPE_WORKFLOW);
// add default policies only if not already in place
authorizeService.addDefaultPoliciesNotInPlace(context, item, defaultCollectionPolicies);
addDefaultPoliciesNotInPlace(context, item, defaultCollectionPolicies);
} finally {
context.restoreAuthSystemState();
}
@@ -1322,7 +1322,91 @@ prevent the generation of resource policy entry values with null dspace_object a
*/
/**
 * Add the default policies, which have not been already added to the given DSpace object.
 * <p>
 * A default (collection-level) READ policy is cloned onto {@code dso} only when (a) an identical
 * policy is not already in place, and (b) either append-mode is off and the object carries no
 * TYPE_CUSTOM READ policy of its own, or append-mode is on and the policy passes the
 * {@code shouldBeAppended} check.
 *
 * @param context The relevant DSpace Context.
 * @param dso The DSpace Object to add policies to
 * @param defaultCollectionPolicies list of policies
 * @throws SQLException An exception that provides information on a database access error or other errors.
 * @throws AuthorizeException Exception indicating the current user of the context does not have permission
 * to perform a particular action.
 */
protected void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso,
List<ResourcePolicy> defaultCollectionPolicies) throws SQLException, AuthorizeException {
// Append-mode relaxes inheritance: default policies may coexist with custom ones,
// subject to the shouldBeAppended() filter below.
boolean appendMode = configurationService
.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode", false);
for (ResourcePolicy defaultPolicy : defaultCollectionPolicies) {
// Skip policies that are already present; otherwise apply the mode-specific rule.
if (!authorizeService
.isAnIdenticalPolicyAlreadyInPlace(context, dso, defaultPolicy.getGroup(), Constants.READ,
defaultPolicy.getID()) &&
(!appendMode && isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso) ||
appendMode && shouldBeAppended(context, dso, defaultPolicy))) {
// Clone rather than share: each object gets its own policy row, marked as inherited.
ResourcePolicy newPolicy = resourcePolicyService.clone(context, defaultPolicy);
newPolicy.setdSpaceObject(dso);
newPolicy.setAction(Constants.READ);
newPolicy.setRpType(ResourcePolicy.TYPE_INHERITED);
resourcePolicyService.update(context, newPolicy);
}
}
}
/**
 * Copy the given TYPE_CUSTOM policies onto {@code dso}, unless the object already carries
 * at least one TYPE_CUSTOM policy — existing custom policies always win over inherited ones.
 *
 * @param context        The relevant DSpace Context.
 * @param dso            The DSpace Object to add policies to
 * @param customPolicies the custom policies to copy when none are present yet
 * @throws SQLException       if a database access error occurs
 * @throws AuthorizeException if the current user is not authorized to add these policies
 */
private void addCustomPoliciesNotInPlace(Context context, DSpaceObject dso, List<ResourcePolicy> customPolicies)
throws SQLException, AuthorizeException {
// Only inherit custom policies when the object has none of its own.
boolean customPoliciesAlreadyInPlace = !authorizeService
.findPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM).isEmpty();
if (!customPoliciesAlreadyInPlace) {
authorizeService.addPolicies(context, customPolicies, dso);
}
}
/**
 * Check whether the given DSpace object has no READ resource policy of type
 * {@code ResourcePolicy.TYPE_CUSTOM}.
 *
 * @param context DSpace context
 * @param dso     DSpace object to check for a custom READ policy
 * @return true when no custom READ policy exists on the object, otherwise false
 * @throws SQLException if something goes wrong retrieving the policies on the DSO
 */
private boolean isNotAlreadyACustomRPOfThisTypeOnDSO(Context context, DSpaceObject dso) throws SQLException {
// null-safe comparison: a policy with a null rpType can never match TYPE_CUSTOM.
return resourcePolicyService.find(context, dso, Constants.READ)
                            .stream()
                            .noneMatch(rp -> ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()));
}
/**
 * Check if the provided default policy should be appended or not to the final
 * item. If an item has at least one custom READ policy, any anonymous READ
 * policy with empty start/end date should be skipped.
 *
 * @param context       DSpace context
 * @param dso           DSpace object to check for custom read RP
 * @param defaultPolicy The policy to check
 * @return true when the default policy must be appended, false when it should be skipped
 * @throws SQLException If something goes wrong retrieving the RP on the DSO
 */
private boolean shouldBeAppended(Context context, DSpaceObject dso, ResourcePolicy defaultPolicy)
throws SQLException {
// An explicit TYPE_CUSTOM READ policy on the object takes precedence over an
// unrestricted (anonymous group, no embargo dates) default policy.
boolean hasCustomPolicy = resourcePolicyService.find(context, dso, Constants.READ)
.stream()
.anyMatch(rp -> Objects.nonNull(rp.getRpType()) &&
Objects.equals(rp.getRpType(), ResourcePolicy.TYPE_CUSTOM));
boolean isAnonymousGroup = Objects.nonNull(defaultPolicy.getGroup())
&& StringUtils.equals(defaultPolicy.getGroup().getName(), Group.ANONYMOUS);
boolean datesAreNull = Objects.isNull(defaultPolicy.getStartDate())
&& Objects.isNull(defaultPolicy.getEndDate());
return !(hasCustomPolicy && isAnonymousGroup && datesAreNull);
}
/**
* Returns an iterator of Items possessing the passed metadata field, or only

View File

@@ -68,7 +68,7 @@ public class Choices {
/**
* descriptive labels for confidence values
*/
private static final int[] confidenceValue = {
private static final int confidenceValue[] = {
CF_UNSET,
CF_NOVALUE,
CF_REJECTED,
@@ -78,7 +78,7 @@ public class Choices {
CF_UNCERTAIN,
CF_ACCEPTED,
};
private static final String[] confidenceText = {
private static final String confidenceText[] = {
"UNSET",
"NOVALUE",
"REJECTED",
@@ -94,7 +94,7 @@ public class Choices {
/**
* The set of values returned by the authority
*/
public Choice[] values = null;
public Choice values[] = null;
/**
* The confidence level that applies to all values in this result set
@@ -133,9 +133,9 @@ public class Choices {
* @param confidence confidence level
* @param more whether more values
*/
public Choices(Choice[] values, int start, int total, int confidence, boolean more) {
public Choices(Choice values[], int start, int total, int confidence, boolean more) {
super();
this.values = ArrayUtils.clone(values);
this.values = (Choice[]) ArrayUtils.clone(values);
this.start = start;
this.total = total;
this.confidence = confidence;
@@ -152,9 +152,9 @@ public class Choices {
* @param more whether more values
* @param defaultSelected default selected value
*/
public Choices(Choice[] values, int start, int total, int confidence, boolean more, int defaultSelected) {
public Choices(Choice values[], int start, int total, int confidence, boolean more, int defaultSelected) {
super();
this.values = ArrayUtils.clone(values);
this.values = (Choice[]) ArrayUtils.clone(values);
this.start = start;
this.total = total;
this.confidence = confidence;

View File

@@ -66,7 +66,7 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
* The map of the input form reader associated to use for a specific java locale
*/
private static Map<Locale, DCInputsReader> dcis = null;
private static String[] pluginNames = null;
private static String pluginNames[] = null;
public DCInputAuthority() {
super();
@@ -87,7 +87,7 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
initPluginNames();
}
return ArrayUtils.clone(pluginNames);
return (String[]) ArrayUtils.clone(pluginNames);
}
private static synchronized void initPluginNames() {
@@ -179,7 +179,7 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
String[] labelsLocale = labels.get(currentLocale.getLanguage());
for (int i = 0; i < valuesLocale.length; ++i) {
if (text.equalsIgnoreCase(valuesLocale[i])) {
Choice[] v = new Choice[1];
Choice v[] = new Choice[1];
v[0] = new Choice(String.valueOf(i), valuesLocale[i], labelsLocale[i]);
return new Choices(v, 0, v.length, Choices.CF_UNCERTAIN, false, 0);
}

View File

@@ -35,25 +35,23 @@ import org.xml.sax.InputSource;
* from {@code ${dspace.dir}/config/controlled-vocabularies/*.xml} and turns
* them into autocompleting authorities.
*
* <p>Configuration: This MUST be configured as a self-named plugin, e.g.: {@code
* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority =
* Configuration: This MUST be configured as a self-named plugin, e.g.: {@code
* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
* org.dspace.content.authority.DSpaceControlledVocabulary
* }
*
* <p>It AUTOMATICALLY configures a plugin instance for each XML file in the
* It AUTOMATICALLY configures a plugin instance for each XML file in the
* controlled vocabularies directory. The name of the plugin is the basename of
* the file; e.g., {@code ${dspace.dir}/config/controlled-vocabularies/nsi.xml}
* would generate a plugin called "nsi".
*
* <p>Each configured plugin comes with three configuration options:
* <ul>
* <li>{@code vocabulary.plugin._plugin_.hierarchy.store = <true|false>
* # Store entire hierarchy along with selected value. Default: TRUE}</li>
* <li>{@code vocabulary.plugin._plugin_.hierarchy.suggest =
* <true|false> # Display entire hierarchy in the suggestion list. Default: TRUE}</li>
* <li>{@code vocabulary.plugin._plugin_.delimiter = "<string>"
* # Delimiter to use when building hierarchy strings. Default: "::"}</li>
* </ul>
* Each configured plugin comes with three configuration options: {@code
* vocabulary.plugin._plugin_.hierarchy.store = <true|false>
* # Store entire hierarchy along with selected value. Default: TRUE
* vocabulary.plugin._plugin_.hierarchy.suggest =
* <true|false> # Display entire hierarchy in the suggestion list. Default: TRUE
* vocabulary.plugin._plugin_.delimiter = "<string>"
* # Delimiter to use when building hierarchy strings. Default: "::"
* }
*
* @author Michael B. Klein
@@ -61,17 +59,16 @@ import org.xml.sax.InputSource;
public class DSpaceControlledVocabulary extends SelfNamedPlugin implements HierarchicalAuthority {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceControlledVocabulary.class);
protected static String xpathTemplate = "//node[contains(translate(@label,'ABCDEFGHIJKLMNOPQRSTUVWXYZ'," +
"'abcdefghijklmnopqrstuvwxyz'),%s)]";
protected static String idTemplate = "//node[@id = %s]";
protected static String idTemplateQuoted = "//node[@id = '%s']";
protected static String labelTemplate = "//node[@label = %s]";
"'abcdefghijklmnopqrstuvwxyz'),'%s')]";
protected static String idTemplate = "//node[@id = '%s']";
protected static String labelTemplate = "//node[@label = '%s']";
protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node";
protected static String rootTemplate = "/node";
protected static String idAttribute = "id";
protected static String labelAttribute = "label";
protected static String[] pluginNames = null;
protected static String pluginNames[] = null;
protected String vocabularyName = null;
protected InputSource vocabulary = null;
protected Boolean suggestHierarchy = false;
@@ -97,7 +94,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
initPluginNames();
}
return ArrayUtils.clone(pluginNames);
return (String[]) ArrayUtils.clone(pluginNames);
}
private static synchronized void initPluginNames() {
@@ -113,7 +110,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
File.separator + "config" +
File.separator + "controlled-vocabularies";
String[] xmlFiles = (new File(vocabulariesPath)).list(new xmlFilter());
List<String> names = new ArrayList<>();
List<String> names = new ArrayList<String>();
for (String filename : xmlFiles) {
names.add((new File(filename)).getName().replace(".xml", ""));
}
@@ -181,23 +178,15 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
public Choices getMatches(String text, int start, int limit, String locale) {
init(locale);
log.debug("Getting matches for '" + text + "'");
String xpathExpression = "";
String[] textHierarchy = text.split(hierarchyDelimiter, -1);
StringBuilder xpathExpressionBuilder = new StringBuilder();
for (int i = 0; i < textHierarchy.length; i++) {
xpathExpressionBuilder.append(String.format(xpathTemplate, "$var" + i));
xpathExpression +=
String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
}
String xpathExpression = xpathExpressionBuilder.toString();
XPath xpath = XPathFactory.newInstance().newXPath();
xpath.setXPathVariableResolver(variableName -> {
String varName = variableName.getLocalPart();
if (varName.startsWith("var")) {
int index = Integer.parseInt(varName.substring(3));
return textHierarchy[index].toLowerCase();
}
throw new IllegalArgumentException("Unexpected variable: " + varName);
});
int total;
List<Choice> choices;
int total = 0;
List<Choice> choices = new ArrayList<Choice>();
try {
NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
total = results.getLength();
@@ -213,23 +202,15 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
@Override
public Choices getBestMatch(String text, String locale) {
init(locale);
log.debug("Getting best matches for {}'", text);
log.debug("Getting best matches for '" + text + "'");
String xpathExpression = "";
String[] textHierarchy = text.split(hierarchyDelimiter, -1);
StringBuilder xpathExpressionBuilder = new StringBuilder();
for (int i = 0; i < textHierarchy.length; i++) {
xpathExpressionBuilder.append(String.format(valueTemplate, "$var" + i));
xpathExpression +=
String.format(valueTemplate, textHierarchy[i].replaceAll("'", "&apos;"));
}
String xpathExpression = xpathExpressionBuilder.toString();
XPath xpath = XPathFactory.newInstance().newXPath();
xpath.setXPathVariableResolver(variableName -> {
String varName = variableName.getLocalPart();
if (varName.startsWith("var")) {
int index = Integer.parseInt(varName.substring(3));
return textHierarchy[index];
}
throw new IllegalArgumentException("Unexpected variable: " + varName);
});
List<Choice> choices;
List<Choice> choices = new ArrayList<Choice>();
try {
NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
choices = getChoicesFromNodeList(results, 0, 1);
@@ -277,7 +258,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
@Override
public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) {
init(locale);
String xpathExpression = String.format(idTemplateQuoted, parentId);
String xpathExpression = String.format(idTemplate, parentId);
return getChoicesByXpath(xpathExpression, start, limit);
}
@@ -301,12 +282,15 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
}
private boolean isRootElement(Node node) {
return node != null && node.getOwnerDocument().getDocumentElement().equals(node);
if (node != null && node.getOwnerDocument().getDocumentElement().equals(node)) {
return true;
}
return false;
}
private Node getNode(String key, String locale) throws XPathExpressionException {
init(locale);
String xpathExpression = String.format(idTemplateQuoted, key);
String xpathExpression = String.format(idTemplate, key);
Node node = getNodeFromXPath(xpathExpression);
return node;
}
@@ -318,7 +302,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
}
private List<Choice> getChoicesFromNodeList(NodeList results, int start, int limit) {
List<Choice> choices = new ArrayList<>();
List<Choice> choices = new ArrayList<Choice>();
for (int i = 0; i < results.getLength(); i++) {
if (i < start) {
continue;
@@ -337,17 +321,17 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
private Map<String, String> addOtherInformation(String parentCurr, String noteCurr,
List<String> childrenCurr, String authorityCurr) {
Map<String, String> extras = new HashMap<>();
Map<String, String> extras = new HashMap<String, String>();
if (StringUtils.isNotBlank(parentCurr)) {
extras.put("parent", parentCurr);
}
if (StringUtils.isNotBlank(noteCurr)) {
extras.put("note", noteCurr);
}
if (childrenCurr.isEmpty()) {
extras.put("hasChildren", "false");
} else {
if (childrenCurr.size() > 0) {
extras.put("hasChildren", "true");
} else {
extras.put("hasChildren", "false");
}
extras.put("id", authorityCurr);
return extras;
@@ -402,7 +386,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
}
private List<String> getChildren(Node node) {
List<String> children = new ArrayList<>();
List<String> children = new ArrayList<String>();
NodeList childNodes = node.getChildNodes();
for (int ci = 0; ci < childNodes.getLength(); ci++) {
Node firstChild = childNodes.item(ci);
@@ -425,7 +409,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
private boolean isSelectable(Node node) {
Node selectableAttr = node.getAttributes().getNamedItem("selectable");
if (null != selectableAttr) {
return Boolean.parseBoolean(selectableAttr.getNodeValue());
return Boolean.valueOf(selectableAttr.getNodeValue());
} else { // Default is true
return true;
}
@@ -452,7 +436,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
}
private Choices getChoicesByXpath(String xpathExpression, int start, int limit) {
List<Choice> choices = new ArrayList<>();
List<Choice> choices = new ArrayList<Choice>();
XPath xpath = XPathFactory.newInstance().newXPath();
try {
Node parentNode = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE);

View File

@@ -14,7 +14,7 @@ package org.dspace.content.authority;
public class SampleAuthority implements ChoiceAuthority {
private String pluginInstanceName;
protected static String[] values = {
protected static String values[] = {
"sun",
"mon",
"tue",
@@ -24,7 +24,7 @@ public class SampleAuthority implements ChoiceAuthority {
"sat"
};
protected static String[] labels = {
protected static String labels[] = {
"Sunday",
"Monday",
"Tuesday",
@@ -37,7 +37,7 @@ public class SampleAuthority implements ChoiceAuthority {
@Override
public Choices getMatches(String query, int start, int limit, String locale) {
int dflt = -1;
Choice[] v = new Choice[values.length];
Choice v[] = new Choice[values.length];
for (int i = 0; i < values.length; ++i) {
v[i] = new Choice(String.valueOf(i), values[i], labels[i]);
if (values[i].equalsIgnoreCase(query)) {
@@ -51,7 +51,7 @@ public class SampleAuthority implements ChoiceAuthority {
public Choices getBestMatch(String text, String locale) {
for (int i = 0; i < values.length; ++i) {
if (text.equalsIgnoreCase(values[i])) {
Choice[] v = new Choice[1];
Choice v[] = new Choice[1];
v[0] = new Choice(String.valueOf(i), values[i], labels[i]);
return new Choices(v, 0, v.length, Choices.CF_UNCERTAIN, false, 0);
}

View File

@@ -46,7 +46,7 @@ public class AIPDIMCrosswalk
*/
@Override
public Namespace[] getNamespaces() {
Namespace[] result = new Namespace[1];
Namespace result[] = new Namespace[1];
result[0] = XSLTCrosswalk.DIM_NS;
return result;
}

View File

@@ -98,7 +98,7 @@ public class AIPTechMDCrosswalk implements IngestionCrosswalk, DisseminationCros
*/
@Override
public Namespace[] getNamespaces() {
Namespace[] result = new Namespace[1];
Namespace result[] = new Namespace[1];
result[0] = XSLTCrosswalk.DIM_NS;
return result;
}

View File

@@ -45,7 +45,7 @@ public class DIMDisseminationCrosswalk
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private static final Namespace[] namespaces = {DIM_NS};
private static final Namespace namespaces[] = {DIM_NS};
@Override
public Namespace[] getNamespaces() {

View File

@@ -63,7 +63,7 @@ public class METSDisseminationCrosswalk
private static final Namespace METS_NS = Namespace
.getNamespace("mets", "http://www.loc.gov/METS/");
private static final Namespace[] namespaces = {METS_NS, MODS_NS, XLINK_NS};
private static final Namespace namespaces[] = {METS_NS, MODS_NS, XLINK_NS};
/**
* URL of METS XML Schema

View File

@@ -70,7 +70,7 @@ public class METSRightsCrosswalk
private String schemaLocation =
METSRights_NS.getURI() + " http://cosimo.stanford.edu/sdr/metsrights.xsd";
private static final Namespace[] namespaces = {METSRights_NS};
private static final Namespace namespaces[] = {METSRights_NS};
private static final Map<Integer, String> otherTypesMapping = new HashMap<Integer, String>();

View File

@@ -109,7 +109,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
* Fill in the plugin alias table from DSpace configuration entries
* for configuration files for flavors of MODS crosswalk:
*/
private static String[] aliases = null;
private static String aliases[] = null;
static {
List<String> aliasList = new ArrayList<>();
@@ -117,11 +117,11 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
for (String key : keys) {
aliasList.add(key.substring(CONFIG_PREFIX.length()));
}
aliases = aliasList.toArray(new String[0]);
aliases = (String[]) aliasList.toArray(new String[aliasList.size()]);
}
public static String[] getPluginNames() {
return ArrayUtils.clone(aliases);
return (String[]) ArrayUtils.clone(aliases);
}
/**
@@ -133,7 +133,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
private static final Namespace XLINK_NS =
Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink");
private static final Namespace[] namespaces = {MODS_NS, XLINK_NS};
private static final Namespace namespaces[] = {MODS_NS, XLINK_NS};
/**
* URL of MODS XML Schema
@@ -259,7 +259,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
while (pe.hasMoreElements()) {
String qdc = pe.nextElement();
String val = modsConfig.getProperty(qdc);
String[] pair = val.split("\\s+\\|\\s+", 2);
String pair[] = val.split("\\s+\\|\\s+", 2);
if (pair.length < 2) {
log.warn("Illegal MODS mapping in " + propsFile.toString() + ", line = " +
qdc + " = " + val);

View File

@@ -65,7 +65,7 @@ public class OREDisseminationCrosswalk
.getConfigurationService();
private static final Namespace[] namespaces = {ATOM_NS, ORE_NS, ORE_ATOM, RDF_NS, DCTERMS_NS, DS_NS};
private static final Namespace namespaces[] = {ATOM_NS, ORE_NS, ORE_ATOM, RDF_NS, DCTERMS_NS, DS_NS};
@Override
@@ -331,14 +331,14 @@ public class OREDisseminationCrosswalk
* @param sourceString source unescaped string
*/
private String encodeForURL(String sourceString) {
Character[] lowalpha = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
Character lowalpha[] = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
Character[] upalpha = {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
Character upalpha[] = {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z'};
Character[] digit = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
Character[] mark = {'-', '_', '.', '!', '~', '*', '\'', '/', '(', ')'};
Character digit[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
Character mark[] = {'-', '_', '.', '!', '~', '*', '\'', '/', '(', ')'};
// reserved
//Character reserved[] = {';' , '/' , '?' , ':' , '@' , '&' , '=' , '+' , '$' , ',' ,'%', '#'};

View File

@@ -220,17 +220,17 @@ public class OREIngestionCrosswalk
* @param sourceString source unescaped string
*/
private String encodeForURL(String sourceString) {
Character[] lowalpha = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
Character lowalpha[] = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
Character[] upalpha = {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
Character upalpha[] = {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z'};
Character[] digit = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
Character[] mark = {'-', '_', '.', '!', '~', '*', '\'', '(', ')'};
Character digit[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
Character mark[] = {'-', '_', '.', '!', '~', '*', '\'', '(', ')'};
// reserved
Character[] reserved = {';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '%', '#'};
Character reserved[] = {';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '%', '#'};
Set<Character> URLcharsSet = new HashSet<Character>();
URLcharsSet.addAll(Arrays.asList(lowalpha));

View File

@@ -20,7 +20,9 @@ import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
@@ -56,7 +58,7 @@ public class PREMISCrosswalk
private final String schemaLocation =
PREMIS_NS.getURI() + " http://www.loc.gov/standards/premis/PREMIS-v1-0.xsd";
private static final Namespace[] namespaces = {PREMIS_NS};
private static final Namespace namespaces[] = {PREMIS_NS};
protected BitstreamService bitstreamService
= ContentServiceFactory.getInstance().getBitstreamService();
@@ -222,17 +224,29 @@ public class PREMISCrosswalk
// c. made-up name based on sequence ID and extension.
String sid = String.valueOf(bitstream.getSequenceID());
String baseUrl = configurationService.getProperty("dspace.ui.url");
String handle = null;
// get handle of parent Item of this bitstream, if there is one:
List<Bundle> bn = bitstream.getBundles();
if (bn.size() > 0) {
List<Item> bi = bn.get(0).getItems();
if (bi.size() > 0) {
handle = bi.get(0).getHandle();
}
}
// get or make up name for bitstream:
String bsName = bitstream.getName();
if (bsName == null) {
List<String> ext = bitstream.getFormat(context).getExtensions();
bsName = "bitstream_" + sid + (ext.size() > 0 ? ext.get(0) : "");
}
if (baseUrl != null) {
if (handle != null && baseUrl != null) {
oiv.setText(baseUrl
+ "/bitstreams/"
+ bitstream.getID()
+ "/download");
+ "/bitstream/"
+ URLEncoder.encode(handle, "UTF-8")
+ "/"
+ sid
+ "/"
+ URLEncoder.encode(bsName, "UTF-8"));
} else {
oiv.setText(URLEncoder.encode(bsName, "UTF-8"));
}

View File

@@ -109,7 +109,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
private final Map<String, String> element2qdc = new HashMap<>();
// the XML namespaces from config file for this name.
private Namespace[] namespaces = null;
private Namespace namespaces[] = null;
private static final Namespace DCTERMS_NS =
Namespace.getNamespace("dcterms", "http://purl.org/dc/terms/");
@@ -139,7 +139,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
* Fill in the plugin-name table from DSpace configuration entries
* for configuration files for flavors of QDC crosswalk:
*/
private static String[] aliases = null;
private static String aliases[] = null;
static {
initStatic();
@@ -433,7 +433,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
if ("qualifieddc".equals(me.getName())) {
ingest(context, dso, me.getChildren(), createMissingMetadataFields);
} else if (element2qdc.containsKey(key)) {
String[] qdc = (element2qdc.get(key)).split("\\.");
String qdc[] = (element2qdc.get(key)).split("\\.");
MetadataField metadataField;
if (qdc.length == 3) {

View File

@@ -73,7 +73,7 @@ public class RoleCrosswalk
*/
@Override
public Namespace[] getNamespaces() {
Namespace[] result = new Namespace[1];
Namespace result[] = new Namespace[1];
result[0] = RoleDisseminator.DSROLES_NS;
return result;
}

View File

@@ -53,10 +53,10 @@ public class SimpleDCDisseminationCrosswalk extends SelfNamedPlugin
private static final String schemaLocation =
DC_NS.getURI() + " " + DC_XSD;
private static final Namespace[] namespaces =
private static final Namespace namespaces[] =
{DC_NS, XSI_NS};
private static final String[] aliases = {"SimpleDC", "DC"};
private static final String aliases[] = {"SimpleDC", "DC"};
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();

View File

@@ -106,7 +106,7 @@ public class XSLTDisseminationCrosswalk
protected static final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
private static final String[] aliases = makeAliases(DIRECTION);
private static final String aliases[] = makeAliases(DIRECTION);
public static String[] getPluginNames() {
return (String[]) ArrayUtils.clone(aliases);
@@ -116,7 +116,7 @@ public class XSLTDisseminationCrosswalk
// until there's an instance, so do it in constructor.
private String schemaLocation = null;
private Namespace[] namespaces = null;
private Namespace namespaces[] = null;
private boolean preferList = false;

View File

@@ -63,7 +63,7 @@ public class XSLTIngestionCrosswalk
private static final String DIRECTION = "submission";
private static final String[] aliases = makeAliases(DIRECTION);
private static final String aliases[] = makeAliases(DIRECTION);
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance()

View File

@@ -178,7 +178,7 @@ public class BitstreamDAOImpl extends AbstractHibernateDSODAO<Bitstream> impleme
@Override
public int countWithNoPolicy(Context context) throws SQLException {
Query query = createQuery(context,
"SELECT count(bit.id) from Bitstream bit where bit.deleted<>true and bit not in" +
"SELECT count(bit.id) from Bitstream bit where bit.deleted<>true and bit.id not in" +
" (select res.dSpaceObject from ResourcePolicy res where res.resourceTypeId = " +
":typeId )");
query.setParameter("typeId", Constants.BITSTREAM);

View File

@@ -12,7 +12,6 @@ import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import jakarta.persistence.Query;
import jakarta.persistence.criteria.CriteriaBuilder;
@@ -20,7 +19,6 @@ import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Join;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.ResourcePolicy_;
import org.dspace.content.Collection;
@@ -42,11 +40,6 @@ import org.dspace.eperson.Group;
* @author kevinvandevelde at atmire.com
*/
public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> implements CollectionDAO {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CollectionDAOImpl.class);
protected CollectionDAOImpl() {
super();
}
@@ -166,7 +159,7 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
@Override
public List<Collection> findCollectionsWithSubscribers(Context context) throws SQLException {
return list(createQuery(context, "SELECT DISTINCT c FROM Collection c JOIN Subscription s ON c = " +
return list(createQuery(context, "SELECT DISTINCT c FROM Collection c JOIN Subscription s ON c.id = " +
"s.dSpaceObject"));
}
@@ -179,25 +172,14 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
@SuppressWarnings("unchecked")
public List<Map.Entry<Collection, Long>> getCollectionsWithBitstreamSizesTotal(Context context)
throws SQLException {
String q = "select col.id, sum(bit.sizeBytes) as totalBytes from Item i join i.collections col " +
"join i.bundles bun join bun.bitstreams bit group by col.id";
String q = "select col as collection, sum(bit.sizeBytes) as totalBytes from Item i join i.collections col " +
"join i.bundles bun join bun.bitstreams bit group by col";
Query query = createQuery(context, q);
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
List<Object[]> list = query.getResultList();
List<Map.Entry<Collection, Long>> returnList = new ArrayList<>(list.size());
for (Object[] o : list) {
CriteriaQuery<Collection> criteriaQuery = criteriaBuilder.createQuery(Collection.class);
Root<Collection> collectionRoot = criteriaQuery.from(Collection.class);
criteriaQuery.select(collectionRoot).where(criteriaBuilder.equal(collectionRoot.get("id"), (UUID) o[0]));
Query collectionQuery = createQuery(context, criteriaQuery);
Collection collection = (Collection) collectionQuery.getSingleResult();
if (collection != null) {
returnList.add(new AbstractMap.SimpleEntry<>(collection, (Long) o[1]));
} else {
log.warn("Unable to find Collection with UUID: {}", o[0]);
}
returnList.add(new AbstractMap.SimpleEntry<>((Collection) o[0], (Long) o[1]));
}
return returnList;
}

View File

@@ -560,7 +560,7 @@ public abstract class AbstractMETSDisseminator
//create our metadata element (dmdSec, techMd, sourceMd, rightsMD etc.)
MdSec mdSec = (MdSec) mdSecClass.getDeclaredConstructor().newInstance();
mdSec.setID(gensym(mdSec.getLocalName()));
String[] parts = typeSpec.split(":", 2);
String parts[] = typeSpec.split(":", 2);
String xwalkName;
String metsName;
@@ -685,7 +685,7 @@ public abstract class AbstractMETSDisseminator
// add either a techMd or sourceMd element to amdSec.
// mdSecClass determines which type.
// mdTypes[] is array of "[metsName:]PluginName" strings, maybe empty.
protected void addToAmdSec(AmdSec fAmdSec, String[] mdTypes, Class mdSecClass,
protected void addToAmdSec(AmdSec fAmdSec, String mdTypes[], Class mdSecClass,
Context context, DSpaceObject dso,
PackageParameters params,
MdStreamCache extraStreams)
@@ -708,10 +708,10 @@ public abstract class AbstractMETSDisseminator
IOException, AuthorizeException, NoSuchMethodException,
InstantiationException, IllegalAccessException, IllegalArgumentException,
IllegalArgumentException, InvocationTargetException {
String[] techMdTypes = getTechMdTypes(context, dso, params);
String[] rightsMdTypes = getRightsMdTypes(context, dso, params);
String[] sourceMdTypes = getSourceMdTypes(context, dso, params);
String[] digiprovMdTypes = getDigiprovMdTypes(context, dso, params);
String techMdTypes[] = getTechMdTypes(context, dso, params);
String rightsMdTypes[] = getRightsMdTypes(context, dso, params);
String sourceMdTypes[] = getSourceMdTypes(context, dso, params);
String digiprovMdTypes[] = getDigiprovMdTypes(context, dso, params);
// only bother if there are any sections to add
if ((techMdTypes.length + sourceMdTypes.length +
@@ -794,10 +794,10 @@ public abstract class AbstractMETSDisseminator
// add DMD sections
// Each type element MAY be either just a MODS-and-crosswalk name, OR
// a combination "MODS-name:crosswalk-name" (e.g. "DC:qDC").
String[] dmdTypes = getDmdTypes(context, dso, params);
String dmdTypes[] = getDmdTypes(context, dso, params);
// record of ID of each dmdsec to make DMDID in structmap.
String[] dmdId = new String[dmdTypes.length];
String dmdId[] = new String[dmdTypes.length];
for (int i = 0; i < dmdTypes.length; ++i) {
MdSec dmdSec = makeMdSec(context, dso, DmdSec.class, dmdTypes[i], params, extraStreams);
if (dmdSec != null) {
@@ -981,7 +981,7 @@ public abstract class AbstractMETSDisseminator
// add metadata & info for Template Item, if exists
Item templateItem = collection.getTemplateItem();
if (templateItem != null) {
String[] templateDmdId = new String[dmdTypes.length];
String templateDmdId[] = new String[dmdTypes.length];
// index where we should add the first template item <dmdSec>.
// Index = number of <dmdSecs> already added + number of <metsHdr> = # of dmdSecs + 1
// (Note: in order to be a valid METS file, all dmdSecs must be before the 1st amdSec)
@@ -1239,8 +1239,8 @@ public abstract class AbstractMETSDisseminator
try {
// add crosswalk's namespaces and schemaLocation to this element:
String raw = xwalk.getSchemaLocation();
String[] sloc = raw == null ? null : raw.split("\\s+");
Namespace[] ns = xwalk.getNamespaces();
String sloc[] = raw == null ? null : raw.split("\\s+");
Namespace ns[] = xwalk.getNamespaces();
for (int i = 0; i < ns.length; ++i) {
String uri = ns[i].getURI();
if (sloc != null && sloc.length > 1 && uri.equals(sloc[0])) {

View File

@@ -498,11 +498,8 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
// Finish creating the item. This actually assigns the handle,
// and will either install item immediately or start a workflow, based on params
PackageUtils.finishCreateItem(context, wsi, handle, params);
} else {
// We should have a workspace item during ingest, so this code is only here for safety.
// Update the object to make sure all changes are committed
PackageUtils.updateDSpaceObject(context, dso);
}
} else if (type == Constants.COLLECTION || type == Constants.COMMUNITY) {
// Add logo if one is referenced from manifest
addContainerLogo(context, dso, manifest, pkgFile, params);
@@ -516,9 +513,6 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
// (this allows subclasses to do some final validation / changes as
// necessary)
finishObject(context, dso, params);
// Update the object to make sure all changes are committed
PackageUtils.updateDSpaceObject(context, dso);
} else if (type == Constants.SITE) {
// Do nothing by default -- Crosswalks will handle anything necessary to ingest at Site-level
@@ -526,15 +520,18 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
// (this allows subclasses to do some final validation / changes as
// necessary)
finishObject(context, dso, params);
// Update the object to make sure all changes are committed
PackageUtils.updateDSpaceObject(context, dso);
} else {
throw new PackageValidationException(
"Unknown DSpace Object type in package, type="
+ String.valueOf(type));
}
// -- Step 6 --
// Finish things up!
// Update the object to make sure all changes are committed
PackageUtils.updateDSpaceObject(context, dso);
return dso;
}
@@ -1422,7 +1419,7 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
* @throws AuthorizeException if authorization error
*/
public abstract void crosswalkObjectDmd(Context context, DSpaceObject dso,
METSManifest manifest, MdrefManager callback, Element[] dmds,
METSManifest manifest, MdrefManager callback, Element dmds[],
PackageParameters params) throws CrosswalkException,
PackageValidationException, AuthorizeException, SQLException,
IOException;

View File

@@ -109,7 +109,7 @@ public class DSpaceAIPIngester
public void crosswalkObjectDmd(Context context, DSpaceObject dso,
METSManifest manifest,
MdrefManager callback,
Element[] dmds, PackageParameters params)
Element dmds[], PackageParameters params)
throws CrosswalkException, PackageValidationException,
AuthorizeException, SQLException, IOException {
int found = -1;

View File

@@ -138,7 +138,7 @@ public class DSpaceMETSDisseminator
// XXX FIXME maybe let dmd choices be configured in DSpace config?
String[] result = null;
String result[] = null;
if (params != null) {
result = params.getProperties("dmd");
}
@@ -163,7 +163,7 @@ public class DSpaceMETSDisseminator
public String[] getTechMdTypes(Context context, DSpaceObject dso, PackageParameters params)
throws SQLException, IOException, AuthorizeException {
if (dso.getType() == Constants.BITSTREAM) {
String[] result = new String[1];
String result[] = new String[1];
result[0] = "PREMIS";
return result;
} else {

View File

@@ -81,7 +81,7 @@ public class DSpaceMETSIngester
public void crosswalkObjectDmd(Context context, DSpaceObject dso,
METSManifest manifest,
MdrefManager callback,
Element[] dmds, PackageParameters params)
Element dmds[], PackageParameters params)
throws CrosswalkException, PackageValidationException,
AuthorizeException, SQLException, IOException {
int found = -1;
@@ -185,7 +185,7 @@ public class DSpaceMETSIngester
PluginService pluginService = CoreServiceFactory.getInstance().getPluginService();
// get the MediaFilter that would create this bundle:
String[] mfNames = pluginService.getAllPluginNames(MediaFilter.class);
String mfNames[] = pluginService.getAllPluginNames(MediaFilter.class);
for (int i = 0; i < mfNames.length; ++i) {
MediaFilter mf = (MediaFilter) pluginService.getNamedPlugin(MediaFilter.class, mfNames[i]);

View File

@@ -210,7 +210,7 @@ public class METSManifest {
// mets.xsd.dc = http://purl.org/dc/elements/1.1/ dc.xsd
// (filename is relative to {dspace_dir}/config/schemas/)
String spec = configurationService.getProperty(key);
String[] val = spec.trim().split("\\s+");
String val[] = spec.trim().split("\\s+");
if (val.length == 2) {
File xsd = new File(xsdPath1, val[1]);
if (!xsd.exists()) {
@@ -221,7 +221,7 @@ public class METSManifest {
} else {
try {
String u = xsd.toURI().toURL().toString();
if (!result.isEmpty()) {
if (result.length() > 0) {
result.append(" ");
}
result.append(val[0]).append(" ").append(u);
@@ -651,7 +651,7 @@ public class METSManifest {
String mimeType = mdWrap.getAttributeValue("MIMETYPE");
if (mimeType != null && mimeType.equalsIgnoreCase("text/xml")) {
byte[] value = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8));
byte value[] = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8));
Document mdd = parser.build(new ByteArrayInputStream(value));
List<Element> result = new ArrayList<>(1);
result.add(mdd.getRootElement());
@@ -719,7 +719,7 @@ public class METSManifest {
throw new MetadataValidationException(
"Invalid METS Manifest: mdWrap element with neither xmlData nor binData child.");
} else {
byte[] value = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8));
byte value[] = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8));
return new ByteArrayInputStream(value);
}
} else {
@@ -950,8 +950,8 @@ public class METSManifest {
public Element[] getDmdElements(String dmdList)
throws MetadataValidationException {
if (dmdList != null && !dmdList.isEmpty()) {
String[] dmdID = dmdList.split("\\s+");
Element[] result = new Element[dmdID.length];
String dmdID[] = dmdList.split("\\s+");
Element result[] = new Element[dmdID.length];
for (int i = 0; i < dmdID.length; ++i) {
result[i] = getElementByXPath("mets:dmdSec[@ID=\"" + dmdID[i] + "\"]", false);
@@ -980,7 +980,7 @@ public class METSManifest {
}
return new Element[0];
}
String[] amdID = amds.split("\\s+");
String amdID[] = amds.split("\\s+");
List<Element> resultList = new ArrayList<>();
for (int i = 0; i < amdID.length; ++i) {
List rmds = getElementByXPath("mets:amdSec[@ID=\"" + amdID[i] + "\"]", false).
@@ -1167,7 +1167,7 @@ public class METSManifest {
"Invalid METS Manifest: mdWrap element for streaming crosswalk without binData " +
"child.");
} else {
byte[] value = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8));
byte value[] = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8));
sxwalk.ingest(context, dso,
new ByteArrayInputStream(value),
mdWrap.getAttributeValue("MIMETYPE"));
@@ -1228,7 +1228,7 @@ public class METSManifest {
log.warn("Got no bitstream ADMID, file@ID=" + fileId);
return;
}
String[] amdID = amds.split("\\s+");
String amdID[] = amds.split("\\s+");
for (int i = 0; i < amdID.length; ++i) {
Element amdSec = getElementByXPath("mets:amdSec[@ID=\"" + amdID[i] + "\"]", false);
for (Iterator ti = amdSec.getChildren("techMD", metsNS).iterator(); ti.hasNext(); ) {
@@ -1262,7 +1262,7 @@ public class METSManifest {
log.warn("Got no bitstream ADMID, file@ID=" + fileId);
return;
}
String[] amdID = amds.split("\\s+");
String amdID[] = amds.split("\\s+");
for (int i = 0; i < amdID.length; ++i) {
Element amdSec = getElementByXPath("mets:amdSec[@ID=\"" + amdID[i] + "\"]", false);
for (Iterator ti = amdSec.getChildren("techMD", metsNS).iterator(); ti.hasNext(); ) {

View File

@@ -19,11 +19,11 @@ import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.cos.COSDocument;
import org.apache.pdfbox.io.MemoryUsageSetting;
import org.apache.pdfbox.io.RandomAccessReadBuffer;
import org.apache.pdfbox.io.RandomAccessBufferedFileInputStream;
import org.apache.pdfbox.io.ScratchFile;
import org.apache.pdfbox.pdfparser.PDFParser;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentInformation;
import org.dspace.authorize.AuthorizeException;
@@ -78,10 +78,10 @@ public class PDFPackager
protected static final String BITSTREAM_FORMAT_NAME = "Adobe PDF";
protected static String[] aliases = {"PDF", "Adobe PDF", "pdf", "application/pdf"};
protected static String aliases[] = {"PDF", "Adobe PDF", "pdf", "application/pdf"};
public static String[] getPluginNames() {
return ArrayUtils.clone(aliases);
return (String[]) ArrayUtils.clone(aliases);
}
protected final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
@@ -331,24 +331,19 @@ public class PDFPackager
COSDocument cos = null;
try {
PDDocument document = null;
ScratchFile scratchFile = null;
try {
// Use up to 80% of JVM free memory and fall back to a temp file (unlimited size)
long useRAM = Runtime.getRuntime().freeMemory() * 80 / 100;
document = Loader.loadPDF(
new RandomAccessReadBuffer(metadata),
() -> new ScratchFile(MemoryUsageSetting.setupMixed(useRAM)));
long useRAM = Runtime.getRuntime().freeMemory() * 80 / 100; // use up to 80% of JVM free memory
scratchFile = new ScratchFile(
MemoryUsageSetting.setupMixed(useRAM)); // then fallback to temp file (unlimited size)
} catch (IOException ioe) {
log.warn("Error initializing scratch file: " + ioe.getMessage());
}
// sanity check: loaded PDF document must not be null.
if (document == null) {
throw new MetadataValidationException("The provided stream could not be parsed into a PDF document.");
}
PDFParser parser = new PDFParser(new RandomAccessBufferedFileInputStream(metadata), scratchFile);
parser.parse();
cos = parser.getDocument();
cos = document.getDocument();
// sanity check: PDFBox breaks on encrypted documents, so give up.
if (cos.getEncryptionDictionary() != null) {
throw new MetadataValidationException("This packager cannot accept an encrypted PDF document.");

View File

@@ -52,7 +52,7 @@ public class PackageParameters extends Properties {
Enumeration pe = request.getParameterNames();
while (pe.hasMoreElements()) {
String name = (String) pe.nextElement();
String[] v = request.getParameterValues(name);
String v[] = request.getParameterValues(name);
if (v.length == 0) {
result.setProperty(name, "");
} else if (v.length == 1) {

View File

@@ -72,7 +72,7 @@ public class PackageUtils {
// Map of metadata elements for Communities and Collections
// Format is alternating key/value in a straight array; use this
// to initialize hash tables that convert to and from.
protected static final String[] ccMetadataMap = {
protected static final String ccMetadataMap[] = {
// getMetadata() -> DC element.term
"name", "dc.title",
"introductory_text", "dc.description",

View File

@@ -143,7 +143,7 @@ public interface BundleService extends DSpaceObjectService<Bundle>, DSpaceObject
* @throws SQLException when an SQL error has occurred (querying DSpace)
* @throws AuthorizeException If the user can't make the changes
*/
public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws AuthorizeException, SQLException;
public void setOrder(Context context, Bundle bundle, UUID bitstreamIds[]) throws AuthorizeException, SQLException;
int countTotal(Context context) throws SQLException;
}

View File

@@ -468,9 +468,6 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
for (Map.Entry<String, Object> entry : equals.entrySet()) {
criteria.where(criteriaBuilder.equal(root.get(entry.getKey()), entry.getValue()));
}
criteria.orderBy(criteriaBuilder.asc(root.get("id")));
return executeCriteriaQuery(context, criteria, cacheable, maxResults, offset);
}

View File

@@ -192,7 +192,7 @@ public class LegacyPluginServiceImpl implements PluginService {
classname = sequenceConfig.get(iname);
}
Object[] result = (Object[]) Array.newInstance(interfaceClass, classname.length);
Object result[] = (Object[]) Array.newInstance(interfaceClass, classname.length);
for (int i = 0; i < classname.length; ++i) {
log.debug("Adding Sequence plugin for interface= " + iname + ", class=" + classname[i]);
result[i] = getAnonymousPlugin(classname[i]);
@@ -277,7 +277,7 @@ public class LegacyPluginServiceImpl implements PluginService {
for (String classname : selfNamedVals) {
try {
Class pluginClass = Class.forName(classname, true, loader);
String[] names = (String[]) pluginClass.getMethod("getPluginNames").
String names[] = (String[]) pluginClass.getMethod("getPluginNames").
invoke(null);
if (names == null || names.length == 0) {
log.error(
@@ -302,7 +302,7 @@ public class LegacyPluginServiceImpl implements PluginService {
}
// add info for a named plugin to cache, under all its names.
private int installNamedConfigs(String iname, String classname, String[] names)
private int installNamedConfigs(String iname, String classname, String names[])
throws ClassNotFoundException {
int found = 0;
for (int i = 0; i < names.length; ++i) {

View File

@@ -20,8 +20,6 @@ import java.util.Set;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.io.RandomAccessReadBuffer;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.dspace.authorize.AuthorizeException;
@@ -126,7 +124,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
//Load enabled collections
String[] citationEnabledCollections = configurationService
.getArrayProperty("citation-page.enabled_collections");
citationEnabledCollectionsList = new ArrayList<String>(Arrays.asList(citationEnabledCollections));
citationEnabledCollectionsList = Arrays.asList(citationEnabledCollections);
//Load enabled communities, and add to collection-list
String[] citationEnabledCommunities = configurationService
@@ -266,7 +264,7 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
private PDDocument loadDocumentFromDB(Context context, Bitstream bitstream) {
try (var inputStream = bitstreamService.retrieve(context, bitstream)) {
return Loader.loadPDF(new RandomAccessReadBuffer(inputStream));
return PDDocument.load(inputStream);
} catch (IOException | SQLException | AuthorizeException e) {
throw new RuntimeException(e);
}
@@ -309,4 +307,4 @@ public class CitationDocumentServiceImpl implements CitationDocumentService, Ini
}
}
}
}
}

View File

@@ -15,7 +15,6 @@ import java.io.OutputStream;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.context.Context;
@@ -85,7 +84,7 @@ public class PdfGenerator {
public PDDocument generate(String html) {
try (var out = new ByteArrayOutputStream()) {
generate(html, out);
return Loader.loadPDF(out.toByteArray());
return PDDocument.load(out.toByteArray());
} catch (IOException e) {
throw new RuntimeException(e);
}

View File

@@ -79,7 +79,7 @@ public class DayTableEmbargoSetter extends DefaultEmbargoSetter {
private Properties getTermProperties() {
Properties termProps = new Properties();
String[] terms = DSpaceServicesFactory.getInstance().getConfigurationService()
String terms[] = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty("embargo.terms.days");
if (terms != null) {

View File

@@ -79,7 +79,7 @@ public class EmbargoCLITool {
*
* @param argv the command line arguments given
*/
public static void main(String[] argv) {
public static void main(String argv[]) {
int status = 0;
Options options = new Options();

View File

@@ -199,19 +199,19 @@ public class EmbargoServiceImpl implements EmbargoService {
// return the schema part of "schema.element.qualifier" metadata field spec
protected String getSchemaOf(String field) {
String[] sa = field.split("\\.", 3);
String sa[] = field.split("\\.", 3);
return sa[0];
}
// return the element part of "schema.element.qualifier" metadata field spec, if any
protected String getElementOf(String field) {
String[] sa = field.split("\\.", 3);
String sa[] = field.split("\\.", 3);
return sa.length > 1 ? sa[1] : null;
}
// return the qualifier part of "schema.element.qualifier" metadata field spec, if any
protected String getQualifierOf(String field) {
String[] sa = field.split("\\.", 3);
String sa[] = field.split("\\.", 3);
return sa.length > 2 ? sa[2] : null;
}

View File

@@ -366,7 +366,7 @@ public class EPerson extends CacheableDSpaceObject implements DSpaceObjectLegacy
@Override
public String getName() {
return this.getFullName();
return getEmail();
}
String getDigestAlgorithm() {

View File

@@ -86,7 +86,7 @@ public class EPersonCLITool {
* @throws AuthorizeException Exception indicating the current user of the context does not have permission
* to perform a particular action.
*/
public static void main(String[] argv)
public static void main(String argv[])
throws ParseException, SQLException, AuthorizeException {
final OptionGroup VERBS = new OptionGroup();
VERBS.addOption(VERB_ADD);

View File

@@ -45,7 +45,7 @@ public class BasicDispatcher extends Dispatcher {
if (log.isDebugEnabled()) {
int n = 0;
for (Iterator i = cp.getFilters().iterator(); i.hasNext(); ++n) {
int[] f = (int[]) i.next();
int f[] = (int[]) i.next();
log.debug("Adding Consumer=\"" + cp.getName() + "\", instance="
+ cp.getConsumer().toString() + ", filter["
+ String.valueOf(n) + "]=(ObjMask="

View File

@@ -110,9 +110,9 @@ public class ConsumerProfile {
// Each "filter" is <objectTypes> + <eventTypes> : ...
filters = new ArrayList<>();
String[] part = filterString.trim().split(":");
String part[] = filterString.trim().split(":");
for (int j = 0; j < part.length; ++j) {
String[] fpart = part[j].split("\\+");
String fpart[] = part[j].split("\\+");
if (fpart.length != 2) {
log.error("Bad Filter clause in consumer stanza in Configuration entry for "
+ CONSUMER_PREFIX
@@ -120,10 +120,10 @@ public class ConsumerProfile {
+ ".consumers: "
+ part[j]);
} else {
int[] filter = new int[2];
int filter[] = new int[2];
filter[0] = 0;
filter[1] = 0;
String[] objectNames = fpart[0].split("\\|");
String objectNames[] = fpart[0].split("\\|");
for (int k = 0; k < objectNames.length; ++k) {
int ot = Event.parseObjectType(objectNames[k]);
if (ot == 0) {
@@ -135,7 +135,7 @@ public class ConsumerProfile {
filter[Event.SUBJECT_MASK] |= ot;
}
}
String[] eventNames = fpart[1].split("\\|");
String eventNames[] = fpart[1].split("\\|");
for (int k = 0; k < eventNames.length; ++k) {
int et = Event.parseEventType(eventNames[k]);
if (et == 0) {

View File

@@ -80,7 +80,7 @@ public class Event implements Serializable {
public static final int EVENT_MASK = 1; // mask of event type
// XXX NOTE: keep this up to date with any changes to event (action) types.
protected static final String[] eventTypeText = {"CREATE", "MODIFY",
protected static final String eventTypeText[] = {"CREATE", "MODIFY",
"MODIFY_METADATA", "ADD", "REMOVE", "DELETE", "INSTALL"};
/** XXX NOTE: These constants must be kept synchronized * */
@@ -590,7 +590,7 @@ public class Event implements Serializable {
public boolean pass(List<int[]> filters) {
boolean result = false;
for (int[] filter : filters) {
for (int filter[] : filters) {
if ((subjectType & filter[SUBJECT_MASK]) != 0 && (eventType & filter[EVENT_MASK]) != 0) {
result = true;
}

View File

@@ -172,10 +172,10 @@ public class EventServiceImpl implements EventService {
if (dispClass != null) {
try {
// all this to call a constructor with an argument
final Class[] argTypes = {String.class};
final Class argTypes[] = {String.class};
Constructor dc = Class.forName(dispClass).getConstructor(
argTypes);
Object[] args = new Object[1];
Object args[] = new Object[1];
args[0] = dispatcherName;
dispatcher = (Dispatcher) dc.newInstance(args);

View File

@@ -80,7 +80,7 @@ public class Report {
// create check list
public static LinkedHashMap<String, Check> checks() {
LinkedHashMap<String, Check> checks = new LinkedHashMap<>();
String[] check_names = DSpaceServicesFactory.getInstance().getConfigurationService()
String check_names[] = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty("healthcheck.checks");
PluginService pluginService = CoreServiceFactory.getInstance().getPluginService();
for (String check_name : check_names) {

View File

@@ -50,26 +50,26 @@ public class UserCheck extends Check {
info.put("Self registered", 0);
for (EPerson e : epersons) {
if (e.getEmail() != null && !e.getEmail().isEmpty()) {
if (e.getEmail() != null && e.getEmail().length() > 0) {
info.put("Have email", info.get("Have email") + 1);
}
if (e.canLogIn()) {
info.put("Can log in (password)",
info.get("Can log in (password)") + 1);
}
if (e.getFirstName() != null && !e.getFirstName().isEmpty()) {
if (e.getFirstName() != null && e.getFirstName().length() > 0) {
info.put("Have 1st name", info.get("Have 1st name") + 1);
}
if (e.getLastName() != null && !e.getLastName().isEmpty()) {
if (e.getLastName() != null && e.getLastName().length() > 0) {
info.put("Have 2nd name", info.get("Have 2nd name") + 1);
}
if (e.getLanguage() != null && !e.getLanguage().isEmpty()) {
if (e.getLanguage() != null && e.getLanguage().length() > 0) {
info.put("Have lang", info.get("Have lang") + 1);
}
if (e.getNetid() != null && !e.getNetid().isEmpty()) {
if (e.getNetid() != null && e.getNetid().length() > 0) {
info.put("Have netid", info.get("Have netid") + 1);
}
if (e.getNetid() != null && !e.getNetid().isEmpty()) {
if (e.getNetid() != null && e.getNetid().length() > 0) {
info.put("Self registered", info.get("Self registered") + 1);
}
}

View File

@@ -29,9 +29,9 @@ import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.jena.ext.xerces.impl.dv.util.Base64;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.xerces.impl.dv.util.Base64;
import org.dspace.app.util.XMLUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;

View File

@@ -116,7 +116,7 @@ public class MetadataFieldConfig {
* @param full A string representing the schema.element.qualifier triplet
*/
public MetadataFieldConfig(String full) {
String[] elements = full.split("\\.");
String elements[] = full.split("\\.");
if (elements.length == 2) {
this.schema = elements[0];
this.element = elements[1];

View File

@@ -105,13 +105,4 @@ public class MetadatumDTO {
/**
 * Set the metadata value component of this DTO.
 *
 * @param value the metadata value to assign (may be null)
 */
public void setValue(String value) {
    this.value = value;
}
/**
* Return string representation of MetadatumDTO
* @return string representation of format "[schema].[element].[qualifier]=[value]"
*/
@Override
public String toString() {
return schema + "." + element + "." + qualifier + "=" + value;
}
}

View File

@@ -1,127 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
* This Processor extracts values from a JSON array, but only when a condition
* on another attribute is met. For example, to extract all values of
* /names/value where /names/types contains "ror_display".
*
* Configurable via:
* pathToArray: e.g., /names
* elementAttribute: e.g., /value
* filterAttribute: e.g., /types
* requiredValueInFilter: e.g., ror_display
*
* Supports filtering when the filter attribute is either a JSON array or a single string.
*
* Example JSON:
* {
* "items": [{
* "names": [
* { "types": ["label", "ror_display"], "value": "Instituto Federal do Piauí" },
* { "types": ["acronym"], "value": "IFPI" }
* ]
* }]
* }
* This processor can extract "Instituto Federal do Piauí" using proper configuration.
*
* Author: Jesiel (based on Mykhaylo Boychuks original processor)
*/
public class ConditionalArrayElementAttributeProcessor implements JsonPathMetadataProcessor {

    private static final Logger log = LogManager.getLogger();

    // JSON Pointer to the array whose elements are inspected (e.g. "/names").
    private String pathToArray;
    // JSON Pointer, relative to an array element, of the value to extract (e.g. "/value").
    private String elementAttribute;
    // JSON Pointer, relative to an array element, of the attribute to filter on (e.g. "/types").
    private String filterAttribute;
    // Value that must be present (case-insensitively) in the filter attribute.
    private String requiredValueInFilter;

    /**
     * Walk the configured array in the given JSON document and collect the
     * element attribute of every entry whose filter attribute matches the
     * required value. Returns an empty collection when the JSON cannot be
     * parsed or nothing matches.
     *
     * @param json raw JSON document to inspect
     * @return extracted textual values, possibly empty, never null
     */
    @Override
    public Collection<String> processMetadata(String json) {
        Collection<String> extracted = new ArrayList<>();
        JsonNode root = parse(json);
        if (root == null) {
            return extracted;
        }
        Iterator<JsonNode> entries = root.at(pathToArray).iterator();
        while (entries.hasNext()) {
            JsonNode entry = entries.next();
            if (matchesFilter(entry.at(filterAttribute))) {
                collectValues(entry.at(elementAttribute), extracted);
            }
        }
        return extracted;
    }

    // True when the filter node — either a JSON array of strings or a single
    // string — contains the required value (case-insensitive comparison).
    private boolean matchesFilter(JsonNode filterNode) {
        if (filterNode.isArray()) {
            for (JsonNode candidate : filterNode) {
                if (requiredValueInFilter.equalsIgnoreCase(candidate.textValue())) {
                    return true;
                }
            }
            return false;
        }
        return filterNode.isTextual()
            && requiredValueInFilter.equalsIgnoreCase(filterNode.textValue());
    }

    // Append the textual content of valueNode (a single string, or an array of
    // strings — blank array entries are skipped) to the results collection.
    private void collectValues(JsonNode valueNode, Collection<String> results) {
        if (valueNode.isTextual()) {
            results.add(valueNode.textValue());
        } else if (valueNode.isArray()) {
            for (JsonNode item : valueNode) {
                if (item.isTextual() && StringUtils.isNotBlank(item.textValue())) {
                    results.add(item.textValue());
                }
            }
        }
    }

    // Parse the raw JSON string; on failure, log and return null so the
    // caller can fall back to an empty result.
    private JsonNode parse(String json) {
        try {
            return new ObjectMapper().readTree(json);
        } catch (JsonProcessingException e) {
            log.error("Unable to process JSON response.", e);
            return null;
        }
    }

    public void setPathToArray(String pathToArray) {
        this.pathToArray = pathToArray;
    }

    public void setElementAttribute(String elementAttribute) {
        this.elementAttribute = elementAttribute;
    }

    public void setFilterAttribute(String filterAttribute) {
        this.filterAttribute = filterAttribute;
    }

    public void setRequiredValueInFilter(String requiredValueInFilter) {
        this.requiredValueInFilter = requiredValueInFilter;
    }
}

View File

@@ -56,7 +56,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
private String urlFetch;
private String urlSearch;
private String apiKey;
private int attempt = 3;
@@ -212,9 +211,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override
public Integer call() throws Exception {
URIBuilder uriBuilder = new URIBuilder(urlSearch);
if (StringUtils.isNotBlank(apiKey)) {
uriBuilder.addParameter("api_key", apiKey);
}
uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class));
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
@@ -291,9 +287,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
List<ImportRecord> records = new LinkedList<ImportRecord>();
URIBuilder uriBuilder = new URIBuilder(urlSearch);
if (StringUtils.isNotBlank(apiKey)) {
uriBuilder.addParameter("api_key", apiKey);
}
uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("retstart", start.toString());
uriBuilder.addParameter("retmax", count.toString());
@@ -324,9 +317,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
String webEnv = getSingleElementValue(response, "WebEnv");
URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
if (StringUtils.isNotBlank(apiKey)) {
uriBuilder2.addParameter("api_key", apiKey);
}
uriBuilder2.addParameter("db", "pubmed");
uriBuilder2.addParameter("retstart", start.toString());
uriBuilder2.addParameter("retmax", count.toString());
@@ -399,9 +389,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
public ImportRecord call() throws Exception {
URIBuilder uriBuilder = new URIBuilder(urlFetch);
if (StringUtils.isNotBlank(apiKey)) {
uriBuilder.addParameter("api_key", apiKey);
}
uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("retmode", "xml");
uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class));
@@ -442,9 +429,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
public Collection<ImportRecord> call() throws Exception {
URIBuilder uriBuilder = new URIBuilder(urlSearch);
if (StringUtils.isNotBlank(apiKey)) {
uriBuilder.addParameter("api_key", apiKey);
}
uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("usehistory", "y");
uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class));
@@ -474,9 +458,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
String queryKey = getSingleElementValue(response, "QueryKey");
URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
if (StringUtils.isNotBlank(apiKey)) {
uriBuilder.addParameter("api_key", apiKey);
}
uriBuilder2.addParameter("db", "pubmed");
uriBuilder2.addParameter("retmode", "xml");
uriBuilder2.addParameter("WebEnv", webEnv);
@@ -552,8 +533,4 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
this.urlSearch = urlSearch;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
}

View File

@@ -46,7 +46,7 @@ public class RorImportMetadataSourceServiceImpl extends AbstractImportMetadataSo
private String url;
private int timeout = 5000;
private int timeout = 1000;
@Autowired
private LiveImportClient liveImportClient;

View File

@@ -39,7 +39,6 @@ public class OrcidWorkFieldMapping {
* The metadata fields related to the work external identifiers.
*/
private Map<String, String> externalIdentifierFields = new HashMap<>();
private Map<String, List<String>> externalIdentifierPartOfMap = new HashMap<>();
/**
* The metadata field related to the work publication date.
@@ -130,15 +129,6 @@ public class OrcidWorkFieldMapping {
this.externalIdentifierFields = parseConfigurations(externalIdentifierFields);
}
public Map<String, List<String>> getExternalIdentifierPartOfMap() {
return this.externalIdentifierPartOfMap;
}
public void setExternalIdentifierPartOfMap(
HashMap<String, List<String>> externalIdentifierPartOfMap) {
this.externalIdentifierPartOfMap = externalIdentifierPartOfMap;
}
public String getPublicationDateField() {
return publicationDateField;
}

View File

@@ -9,7 +9,6 @@ package org.dspace.orcid.model.factory.impl;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.orcid.jaxb.model.common.Relationship.PART_OF;
import static org.orcid.jaxb.model.common.Relationship.SELF;
import java.util.ArrayList;
@@ -74,12 +73,12 @@ public class OrcidWorkFactory implements OrcidEntityFactory {
@Override
public Activity createOrcidObject(Context context, Item item) {
Work work = new Work();
work.setWorkType(getWorkType(context, item));
work.setJournalTitle(getJournalTitle(context, item));
work.setWorkContributors(getWorkContributors(context, item));
work.setWorkTitle(getWorkTitle(context, item));
work.setPublicationDate(getPublicationDate(context, item));
work.setWorkExternalIdentifiers(getWorkExternalIds(context, item, work));
work.setWorkExternalIdentifiers(getWorkExternalIds(context, item));
work.setWorkType(getWorkType(context, item));
work.setShortDescription(getShortDescription(context, item));
work.setLanguageCode(getLanguageCode(context, item));
work.setUrl(getUrl(context, item));
@@ -150,65 +149,57 @@ public class OrcidWorkFactory implements OrcidEntityFactory {
}
/**
* Returns a list of external work IDs constructed in the org.orcid.jaxb
* ExternalIDs object
* Creates an instance of ExternalIDs from the metadata values of the given
* item, using the orcid.mapping.funding.external-ids configuration.
*/
private ExternalIDs getWorkExternalIds(Context context, Item item, Work work) {
ExternalIDs externalIDs = new ExternalIDs();
externalIDs.getExternalIdentifier().addAll(getWorkExternalIdList(context, item, work));
return externalIDs;
private ExternalIDs getWorkExternalIds(Context context, Item item) {
ExternalIDs externalIdentifiers = new ExternalIDs();
externalIdentifiers.getExternalIdentifier().addAll(getWorkSelfExternalIds(context, item));
return externalIdentifiers;
}
/**
* Creates a list of ExternalID, one for orcid.mapping.funding.external-ids
* value, taking the values from the given item and work type.
* value, taking the values from the given item.
*/
private List<ExternalID> getWorkExternalIdList(Context context, Item item, Work work) {
private List<ExternalID> getWorkSelfExternalIds(Context context, Item item) {
List<ExternalID> externalIds = new ArrayList<>();
List<ExternalID> selfExternalIds = new ArrayList<>();
Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();
if (externalIdentifierFields.containsKey(SIMPLE_HANDLE_PLACEHOLDER)) {
String handleType = externalIdentifierFields.get(SIMPLE_HANDLE_PLACEHOLDER);
ExternalID handle = new ExternalID();
handle.setType(handleType);
handle.setValue(item.getHandle());
handle.setRelationship(SELF);
externalIds.add(handle);
selfExternalIds.add(getExternalId(handleType, item.getHandle(), SELF));
}
// Resolve work type, used to determine identifier relationship type
// For version / funding relationships, we might want to use more complex
// business rules than just "work and id type"
final String workType = (work != null && work.getWorkType() != null) ?
work.getWorkType().value() : WorkType.OTHER.value();
getMetadataValues(context, item, externalIdentifierFields.keySet()).stream()
.map(metadataValue -> this.getExternalId(metadataValue, workType))
.forEach(externalIds::add);
.map(this::getSelfExternalId)
.forEach(selfExternalIds::add);
return externalIds;
return selfExternalIds;
}
/**
* Creates an instance of ExternalID taking the value from the given
* metadataValue. The type of the ExternalID is calculated using the
* orcid.mapping.funding.external-ids configuration. The relationship of the
* ExternalID is SELF.
*/
private ExternalID getSelfExternalId(MetadataValue metadataValue) {
Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();
String metadataField = metadataValue.getMetadataField().toString('.');
return getExternalId(externalIdentifierFields.get(metadataField), metadataValue.getValue(), SELF);
}
/**
* Creates an instance of ExternalID with the given type, value and
* relationship.
*/
private ExternalID getExternalId(MetadataValue metadataValue, String workType) {
Map<String, String> externalIdentifierFields = fieldMapping.getExternalIdentifierFields();
Map<String, List<String>> externalIdentifierPartOfMap = fieldMapping.getExternalIdentifierPartOfMap();
String metadataField = metadataValue.getMetadataField().toString('.');
String identifierType = externalIdentifierFields.get(metadataField);
// Default relationship type is SELF, configuration can
// override to PART_OF based on identifier and work type
Relationship relationship = SELF;
if (externalIdentifierPartOfMap.containsKey(identifierType)
&& externalIdentifierPartOfMap.get(identifierType).contains(workType)) {
relationship = PART_OF;
}
private ExternalID getExternalId(String type, String value, Relationship relationship) {
ExternalID externalID = new ExternalID();
externalID.setType(identifierType);
externalID.setValue(metadataValue.getValue());
externalID.setType(type);
externalID.setValue(value);
externalID.setRelationship(relationship);
return externalID;
}

View File

@@ -283,8 +283,6 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
itemService.addMetadata(context, item, "dc", "title", null, null, fullName);
itemService.addMetadata(context, item, "person", "email", null, null, ePerson.getEmail());
itemService.addMetadata(context, item, "dspace", "object", "owner", null, fullName, id, CF_ACCEPTED);
itemService.addMetadata(context, item, "person", "familyName", null, null, ePerson.getLastName());
itemService.addMetadata(context, item, "person", "givenName", null, null, ePerson.getFirstName());
item = installItemService.installItem(context, workspaceItem);

View File

@@ -117,7 +117,7 @@ public abstract class DSpaceRunnable<T extends ScriptConfiguration> implements R
* @param args The primitive array of Strings representing the parameters
* @throws ParseException If something goes wrong
*/
protected StepResult parse(String[] args) throws ParseException {
private StepResult parse(String[] args) throws ParseException {
commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args);
setup();
return StepResult.Continue;

View File

@@ -34,7 +34,6 @@ import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
@@ -232,10 +231,6 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
throw new RuntimeException(e);
}
if (dspaceObject instanceof Bitstream && !isBitstreamLoggable((Bitstream) dspaceObject)) {
return;
}
if (solr == null) {
return;
}
@@ -283,10 +278,6 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override
public void postView(DSpaceObject dspaceObject,
String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer) {
if (dspaceObject instanceof Bitstream && !isBitstreamLoggable((Bitstream) dspaceObject)) {
return;
}
if (solr == null) {
return;
}
@@ -1626,35 +1617,4 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
throw new UnknownHostException("unknown ip format");
}
/**
* Checks if a given Bitstream's bundles are configured to be logged in Solr statistics.
*
* @param bitstream The bitstream to check.
* @return {@code true} if the bitstream event should be logged, {@code false} otherwise.
*/
private boolean isBitstreamLoggable(Bitstream bitstream) {
String[] allowedBundles = configurationService
.getArrayProperty("solr-statistics.query.filter.bundles");
if (allowedBundles == null || allowedBundles.length == 0) {
return true;
}
List<String> allowedBundlesList = Arrays.asList(allowedBundles);
try {
List<Bundle> actualBundles = bitstream.getBundles();
if (actualBundles.isEmpty()) {
return true;
}
for (Bundle bundle : actualBundles) {
if (allowedBundlesList.contains(bundle.getName())) {
return true;
}
}
} catch (SQLException e) {
log.error("Error checking bitstream bundles for logging statistics for bitstream {}",
bitstream.getID(), e);
return true;
}
return false;
}
}

View File

@@ -137,10 +137,9 @@ public abstract class ExportEventProcessor {
.append(URLEncoder.encode(clientUA, UTF_8));
String hostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
String oaiPrefix = configurationService.getProperty("oai.identifier.prefix");
data.append("&").append(URLEncoder.encode("rft.artnum", UTF_8)).append("=").
append(URLEncoder.encode("oai:" + oaiPrefix + ":" + item
append(URLEncoder.encode("oai:" + hostName + ":" + item
.getHandle(), UTF_8));
data.append("&").append(URLEncoder.encode("rfr_dat", UTF_8)).append("=")
.append(URLEncoder.encode(referer, UTF_8));

View File

@@ -429,7 +429,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
//modulo
if ((processedCounter % batchCommitSize) == 0) {
log.info("Migration Commit Checkpoint: " + processedCounter);
context.commit();
context.dispatchEvents();
}
}

View File

@@ -19,11 +19,9 @@ import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.core.Utils;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Native DSpace (or "Directory Scatter" if you prefer) asset store.
@@ -254,10 +252,7 @@ public class DSBitStoreService extends BaseBitStoreService {
}
File bitstreamFile = new File(bufFilename.toString());
Path normalizedPath = bitstreamFile.toPath().normalize();
String[] allowedAssetstoreRoots = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty("assetstore.allowed.roots", new String[]{});
if (!normalizedPath.startsWith(baseDir.getAbsolutePath())
&& !StringUtils.startsWithAny(normalizedPath.toString(), allowedAssetstoreRoots)) {
if (!normalizedPath.startsWith(baseDir.getAbsolutePath())) {
log.error("Bitstream path outside of assetstore root requested:" +
"bitstream={}, path={}, assetstore={}",
bitstream.getID(), normalizedPath, baseDir.getAbsolutePath());

View File

@@ -123,6 +123,16 @@ public class DatabaseUtils {
// Print basic database connection information
printDBInfo(connection);
// Print any database warnings/errors found (if any)
boolean issueFound = printDBIssues(connection);
// If issues found, exit with an error status (even if connection succeeded).
if (issueFound) {
System.exit(1);
} else {
System.exit(0);
}
} catch (SQLException sqle) {
System.err.println("\nError running 'test': ");
System.err.println(" - " + sqle);
@@ -160,6 +170,16 @@ public class DatabaseUtils {
"migrate'.");
}
}
// Print any database warnings/errors found (if any)
boolean issueFound = printDBIssues(connection);
// If issues found, exit with an error status
if (issueFound) {
System.exit(1);
} else {
System.exit(0);
}
} catch (SQLException e) {
System.err.println("Info exception:");
e.printStackTrace(System.err);
@@ -315,6 +335,31 @@ public class DatabaseUtils {
}
try (Connection connection = dataSource.getConnection()) {
String dbType = getDbType(connection);
// Not all Postgres user accounts will be able to run a 'clean',
// as only 'superuser' accounts can remove the 'pgcrypto' extension.
if (dbType.equals(DBMS_POSTGRES)) {
// Check if database user has permissions suitable to run a clean
if (!PostgresUtils.checkCleanPermissions(connection)) {
String username = connection.getMetaData().getUserName();
// Exit immediately, providing a descriptive error message
System.out.println(
"\nERROR: The database user '" + username + "' does not have sufficient " +
"privileges to run a 'database clean' (via Flyway).");
System.out.println(
"\nIn order to run a 'clean', the database user MUST have 'superuser' privileges");
System.out.println(
"OR the '" + PostgresUtils.PGCRYPTO + "' extension must be installed in a " +
"separate schema (see documentation).");
System.out.println(
"\nOptionally, you could also manually remove the '" + PostgresUtils.PGCRYPTO +
"' extension first (DROP EXTENSION " + PostgresUtils.PGCRYPTO +
" CASCADE;), then rerun the 'clean'");
System.exit(1);
}
}
BufferedReader input = new BufferedReader(new InputStreamReader(System.in,
StandardCharsets.UTF_8));
@@ -323,6 +368,11 @@ public class DatabaseUtils {
.println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n");
System.out.println("There is NO turning back from this action. Backup your DB before " +
"continuing.");
if (dbType.equals(DBMS_POSTGRES)) {
System.out.println(
"\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped " +
"if it is in the same schema as the DSpace database.\n");
}
System.out.print("Do you want to PERMANENTLY DELETE everything from your database? [y/n]: ");
String choiceString = input.readLine();
input.close();
@@ -426,10 +476,108 @@ public class DatabaseUtils {
"Database Software: " + meta.getDatabaseProductName() + " version " + meta.getDatabaseProductVersion());
System.out.println("Database Driver: " + meta.getDriverName() + " version " + meta.getDriverVersion());
// For Postgres, report whether pgcrypto is installed
// (If it isn't, we'll also write out warnings...see below)
if (dbType.equals(DBMS_POSTGRES)) {
boolean pgcryptoUpToDate = PostgresUtils.isPgcryptoUpToDate();
Double pgcryptoVersion = PostgresUtils.getPgcryptoInstalledVersion(connection);
System.out.println(
"PostgreSQL '" + PostgresUtils.PGCRYPTO + "' extension installed/up-to-date? " + pgcryptoUpToDate + "" +
" " + ((pgcryptoVersion != null) ? "(version=" + pgcryptoVersion + ")" : "(not installed)"));
}
// Finally, print out our version of Flyway
System.out.println("FlywayDB Version: " + VersionPrinter.getVersion());
}
/**
* Print any warnings about current database setup to System.err (if any).
* This is utilized by both the 'test' and 'info' commandline options.
*
* @param connection current database connection
* @return boolean true if database issues found, false otherwise
* @throws SQLException if database error occurs
*/
private static boolean printDBIssues(Connection connection) throws SQLException {
boolean issueFound = false;
// Get the DB Type
String dbType = getDbType(connection);
// For PostgreSQL databases, we need to check for the 'pgcrypto' extension.
// If it is NOT properly installed, we'll need to warn the user, as DSpace will be unable to proceed.
if (dbType.equals(DBMS_POSTGRES)) {
// Get version of pgcrypto available in this postgres instance
Double pgcryptoAvailable = PostgresUtils.getPgcryptoAvailableVersion(connection);
// Generic requirements message
String requirementsMsg = "\n** DSpace REQUIRES PostgreSQL >= " + PostgresUtils.POSTGRES_VERSION + " AND "
+ PostgresUtils.PGCRYPTO + " extension >= " + PostgresUtils.PGCRYPTO_VERSION + " **\n";
// Check if installed in PostgreSQL & a supported version
if (pgcryptoAvailable != null && pgcryptoAvailable.compareTo(PostgresUtils.PGCRYPTO_VERSION) >= 0) {
// We now know it's available in this Postgres. Let's see if it is installed in this database.
Double pgcryptoInstalled = PostgresUtils.getPgcryptoInstalledVersion(connection);
// Check if installed in database, but outdated version
if (pgcryptoInstalled != null && pgcryptoInstalled.compareTo(PostgresUtils.PGCRYPTO_VERSION) < 0) {
System.out.println(
"\nWARNING: Required PostgreSQL '" + PostgresUtils.PGCRYPTO + "' extension is OUTDATED " +
"(installed version=" + pgcryptoInstalled + ", available version = " + pgcryptoAvailable
+ ").");
System.out.println(requirementsMsg);
System.out.println(
"To update it, please connect to your DSpace database as a 'superuser' and manually run the " +
"following command: ");
System.out.println(
"\n ALTER EXTENSION " + PostgresUtils.PGCRYPTO + " UPDATE TO '" + pgcryptoAvailable + "';\n");
issueFound = true;
} else if (pgcryptoInstalled == null) {
// If it's not installed in database
System.out.println(
"\nWARNING: Required PostgreSQL '" + PostgresUtils.PGCRYPTO + "' extension is NOT INSTALLED " +
"on this database.");
System.out.println(requirementsMsg);
System.out.println(
"To install it, please connect to your DSpace database as a 'superuser' and manually run the " +
"following command: ");
System.out.println("\n CREATE EXTENSION " + PostgresUtils.PGCRYPTO + ";\n");
issueFound = true;
}
} else if (pgcryptoAvailable != null && pgcryptoAvailable.compareTo(PostgresUtils.PGCRYPTO_VERSION) < 0) {
// If installed in Postgres, but an unsupported version
System.out.println(
"\nWARNING: UNSUPPORTED version of PostgreSQL '" + PostgresUtils.PGCRYPTO + "' extension found " +
"(version=" + pgcryptoAvailable + ").");
System.out.println(requirementsMsg);
System.out.println(
"Make sure you are running a supported version of PostgreSQL, and then install " + PostgresUtils
.PGCRYPTO + " version >= " + PostgresUtils.PGCRYPTO_VERSION);
System.out.println(
"The '" + PostgresUtils.PGCRYPTO + "' extension is often provided in the 'postgresql-contrib' " +
"package for your operating system.");
issueFound = true;
} else if (pgcryptoAvailable == null) {
// If it's not installed in Postgres
System.out.println(
"\nWARNING: PostgreSQL '" + PostgresUtils.PGCRYPTO + "' extension is NOT AVAILABLE. Please " +
"install it into this PostgreSQL instance.");
System.out.println(requirementsMsg);
System.out.println(
"The '" + PostgresUtils.PGCRYPTO + "' extension is often provided in the 'postgresql-contrib' " +
"package for your operating system.");
System.out.println(
"Once the extension is installed globally, please connect to your DSpace database as a " +
"'superuser' and manually run the following command: ");
System.out.println("\n CREATE EXTENSION " + PostgresUtils.PGCRYPTO + ";\n");
issueFound = true;
}
}
return issueFound;
}
/**
* Setup/Initialize the Flyway Configuration to run against our DSpace database
* and point at our migration scripts.

View File

@@ -0,0 +1,149 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.logging.log4j.Logger;
import org.flywaydb.core.api.FlywayException;
import org.flywaydb.core.api.callback.Callback;
import org.flywaydb.core.api.callback.Context;
import org.flywaydb.core.api.callback.Event;
/**
 * Flyway {@link Callback} which automatically verifies that "pgcrypto"
 * is at the proper version before running any database migrations.
 * <P>
 * When running PostgreSQL, pgcrypto is REQUIRED by DSpace as it allows UUIDs
 * to be generated.
 * <P>
 * During a database "clean", this also de-registers "pgcrypto" prior to the
 * full database clean.
 *
 * @author Tim Donohue
 */
public class PostgreSQLCryptoChecker implements Callback {
    private Logger log = org.apache.logging.log4j.LogManager.getLogger(PostgreSQLCryptoChecker.class);

    /**
     * Check for pgcrypto (if needed). Throws an exception if pgcrypto is
     * not installed or needs an upgrade.
     *
     * @param connection database connection
     */
    public void checkPgCrypto(Connection connection) {
        String databaseType;
        try {
            databaseType = DatabaseUtils.getDbType(connection);
        } catch (SQLException se) {
            throw new FlywayException("Unable to determine database type.", se);
        }
        // Only PostgreSQL databases require the 'pgcrypto' extension
        if (databaseType == null || !databaseType.equals(DatabaseUtils.DBMS_POSTGRES)) {
            return;
        }
        // A supported version of 'pgcrypto' MUST be installed & up-to-date to continue
        if (!PostgresUtils.isPgcryptoUpToDate()) {
            throw new FlywayException(
                "This PostgreSQL Database is INCOMPATIBLE with DSpace. The upgrade will NOT proceed. " +
                    "A supported version (>=" + PostgresUtils.PGCRYPTO_VERSION + ") of the '" +
                    PostgresUtils.PGCRYPTO + "' extension must be installed! " +
                    "Please run 'dspace database info' for additional info/tips.");
        }
    }

    /**
     * Remove pgcrypto (if necessary).
     * <P>
     * The pgcrypto extension MUST be removed before a clean or else errors occur.
     * This method checks if it needs to be removed and, if so, removes it.
     *
     * @param connection database connection
     */
    public void removePgCrypto(Connection connection) {
        try {
            String databaseType = DatabaseUtils.getDbType(connection);
            // Removal is only ever needed on PostgreSQL
            if (databaseType == null || !databaseType.equals(DatabaseUtils.DBMS_POSTGRES)) {
                return;
            }
            String schema = DatabaseUtils.getSchemaName(connection);
            // pgcrypto only blocks a 'clean' when it lives in the current schema
            if (!PostgresUtils.isPgcryptoInSchema(schema)) {
                return;
            }
            try (Statement statement = connection.createStatement()) {
                // WARNING: ONLY superusers can remove pgcrypto. However, at this point,
                // user account permissions were already verified via
                // PostgresUtils.checkCleanPermissions() (called prior to a 'clean'
                // being triggered).
                statement.execute("DROP EXTENSION " + PostgresUtils.PGCRYPTO + " CASCADE");
            }
        } catch (SQLException e) {
            throw new FlywayException("Failed to check for and/or remove '" + PostgresUtils.PGCRYPTO + "' extension",
                                      e);
        }
    }

    /**
     * The callback name; Flyway uses this to sort callbacks alphabetically
     * before executing them.
     *
     * @return the callback name (simple class name, not prepended by package)
     */
    @Override
    public String getCallbackName() {
        return PostgreSQLCryptoChecker.class.getSimpleName();
    }

    /**
     * Events supported by this callback.
     *
     * @param event Flyway event
     * @param context Flyway context
     * @return true if BEFORE_BASELINE, BEFORE_MIGRATE or BEFORE_CLEAN
     */
    @Override
    public boolean supports(Event event, Context context) {
        switch (event) {
            case BEFORE_BASELINE:
            case BEFORE_MIGRATE:
            case BEFORE_CLEAN:
                return true;
            default:
                return false;
        }
    }

    /**
     * Whether the event can be handled inside a transaction.
     *
     * @param event Flyway event
     * @param context Flyway context
     * @return true
     */
    @Override
    public boolean canHandleInTransaction(Event event, Context context) {
        return true;
    }

    /**
     * What to run when the callback is triggered: before baseline/migrate,
     * verify pgcrypto; before clean, remove pgcrypto (if present).
     *
     * @param event Flyway event
     * @param context Flyway context
     */
    @Override
    public void handle(Event event, Context context) {
        switch (event) {
            case BEFORE_BASELINE:
            case BEFORE_MIGRATE:
                checkPgCrypto(context.getConnection());
                break;
            case BEFORE_CLEAN:
                removePgCrypto(context.getConnection());
                break;
            default:
                break;
        }
    }
}

View File

@@ -0,0 +1,218 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.rdbms;
import static org.dspace.storage.rdbms.DatabaseUtils.getSchemaName;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.flywaydb.core.api.FlywayException;
/**
* Database utility class specific to Postgres.
* This class contains tools and methods which are useful in determining
* the status of a PostgreSQL database backend. It's a companion class
* to DatabaseUtils, but PostgreSQL specific.
*
* @author Tim Donohue
*/
public class PostgresUtils {
// PostgreSQL pgcrypto extension name, and required versions of Postgres & pgcrypto
public static final String PGCRYPTO = "pgcrypto";
public static final Double PGCRYPTO_VERSION = 1.1;
public static final Double POSTGRES_VERSION = 9.4;
/**
* Default constructor
*/
private PostgresUtils() { }
/**
 * Get version of pgcrypto extension available. The extension is "available"
 * if it's been installed via operating system tools/packages. It also
 * MUST be installed in the DSpace database (see getPgcryptoInstalledVersion()).
 * <P>
 * The pgcrypto extension is required for Postgres databases.
 *
 * @param connection database connection
 * @return version number or null if not available
 */
protected static Double getPgcryptoAvailableVersion(Connection connection) {
    // pg_available_extensions lists every extension this server can install
    String availableQuery = "SELECT default_version AS version FROM pg_available_extensions WHERE name=?";
    Double availableVersion = null;
    try (PreparedStatement statement = connection.prepareStatement(availableQuery)) {
        statement.setString(1, PGCRYPTO);
        try (ResultSet resultSet = statement.executeQuery()) {
            if (resultSet.next()) {
                availableVersion = resultSet.getDouble("version");
            }
        }
    } catch (SQLException e) {
        throw new FlywayException("Unable to determine whether 'pgcrypto' extension is available.", e);
    }
    return availableVersion;
}
/**
 * Get version of pgcrypto extension installed in the DSpace database.
 * <P>
 * The pgcrypto extension is required for Postgres databases to support
 * UUIDs.
 *
 * @param connection database connection
 * @return version number or null if not installed
 */
protected static Double getPgcryptoInstalledVersion(Connection connection) {
    // pg_extension lists the extensions installed in THIS database
    String installedQuery = "SELECT extversion AS version FROM pg_extension WHERE extname=?";
    Double installedVersion = null;
    try (PreparedStatement statement = connection.prepareStatement(installedQuery)) {
        statement.setString(1, PGCRYPTO);
        try (ResultSet resultSet = statement.executeQuery()) {
            if (resultSet.next()) {
                installedVersion = resultSet.getDouble("version");
            }
        }
    } catch (SQLException e) {
        throw new FlywayException("Unable to determine whether 'pgcrypto' extension is installed.", e);
    }
    return installedVersion;
}
/**
* Check if the pgcrypto extension is BOTH installed AND up-to-date.
* <P>
* This requirement is only needed for PostgreSQL databases.
* It doesn't matter what schema pgcrypto is installed in, as long as it exists.
*
* @return true if everything is installed and up-to-date. False otherwise.
*/
public static boolean isPgcryptoUpToDate() {
// Get our configured dataSource
DataSource dataSource = DatabaseUtils.getDataSource();
try (Connection connection = dataSource.getConnection()) {
Double pgcryptoInstalled = getPgcryptoInstalledVersion(connection);
// Check if installed & up-to-date in this DSpace database
if (pgcryptoInstalled != null && pgcryptoInstalled.compareTo(PGCRYPTO_VERSION) >= 0) {
return true;
}
return false;
} catch (SQLException e) {
throw new FlywayException("Unable to determine whether 'pgcrypto' extension is up-to-date.", e);
}
}
/**
* Check if the pgcrypto extension is installed into a particular schema
* <P>
* This allows us to check if pgcrypto needs to be REMOVED prior to running
* a 'clean' on this database. If pgcrypto is in the same schema as the
* dspace database, a 'clean' will require removing pgcrypto FIRST.
*
* @param schema name of schema
* @return true if pgcrypto is in this schema. False otherwise.
*/
public static boolean isPgcryptoInSchema(String schema) {
// Get our configured dataSource
DataSource dataSource = DatabaseUtils.getDataSource();
try (Connection connection = dataSource.getConnection()) {
// Check if pgcrypto is installed in the current database schema.
String pgcryptoInstalledInSchema = "SELECT extversion FROM pg_extension,pg_namespace " +
"WHERE pg_extension.extnamespace=pg_namespace.oid " +
"AND extname=? " +
"AND nspname=?;";
Double pgcryptoVersion = null;
try (PreparedStatement statement = connection.prepareStatement(pgcryptoInstalledInSchema)) {
statement.setString(1, PGCRYPTO);
statement.setString(2, schema);
try (ResultSet results = statement.executeQuery()) {
if (results.next()) {
pgcryptoVersion = results.getDouble("extversion");
}
}
}
// If a pgcrypto version returns, it's installed in this schema
if (pgcryptoVersion != null) {
return true;
} else {
return false;
}
} catch (SQLException e) {
throw new FlywayException(
"Unable to determine whether 'pgcrypto' extension is installed in schema '" + schema + "'.", e);
}
}
/**
* Check if the current user has permissions to run a clean on existing
* database.
* <P>
* Mostly this just checks if you need to remove pgcrypto, and if so,
* whether you have permissions to do so.
*
* @param connection database connection
* @return true if permissions valid, false otherwise
*/
protected static boolean checkCleanPermissions(Connection connection) {
try {
// get username of our db user
String username = connection.getMetaData().getUserName();
// Check their permissions. Are they a 'superuser'?
String checkSuperuser = "SELECT rolsuper FROM pg_roles WHERE rolname=?;";
boolean superuser = false;
try (PreparedStatement statement = connection.prepareStatement(checkSuperuser)) {
statement.setString(1, username);
try (ResultSet results = statement.executeQuery()) {
if (results.next()) {
superuser = results.getBoolean("rolsuper");
}
}
} catch (SQLException e) {
throw new FlywayException("Unable to determine if user '" + username + "' is a superuser.", e);
}
// If user is a superuser, then "clean" can be run successfully
if (superuser) {
return true;
} else {
// Otherwise, we'll need to see which schema 'pgcrypto' is installed in
// Get current schema name
String schema = getSchemaName(connection);
// If pgcrypto is installed in this schema, then superuser privileges are needed to remove it
if (isPgcryptoInSchema(schema)) {
return false;
} else {
// otherwise, a 'clean' can be run by anyone
return true;
}
}
} catch (SQLException e) {
throw new FlywayException("Unable to determine if DB user has 'clean' privileges.", e);
}
}
}

View File

@@ -67,7 +67,6 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma
public void perform(Context context, DSpaceRunnableHandler handler, String subscriptionType, String frequency) {
List<IndexableObject> communityItems = new ArrayList<>();
List<IndexableObject> collectionsItems = new ArrayList<>();
EPerson currentEperson = context.getCurrentUser();
try {
List<Subscription> subscriptions =
findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency);
@@ -78,10 +77,7 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma
for (Subscription subscription : subscriptions) {
DSpaceObject dSpaceObject = subscription.getDSpaceObject();
EPerson ePerson = subscription.getEPerson();
// Set the current user to the subscribed eperson because the Solr query checks
// the permissions of the current user in the ANONYMOUS group.
// If there is no user (i.e., `current user = null`), it will send an email with no new items.
context.setCurrentUser(ePerson);
if (!authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, READ, true)) {
iterator++;
continue;
@@ -130,8 +126,6 @@ public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEma
handler.handleException(e);
context.abort();
}
// Reset the current user because it was changed to subscriber eperson
context.setCurrentUser(currentEperson);
}
@SuppressWarnings("rawtypes")

View File

@@ -58,7 +58,7 @@ public class PubMedToImport {
*/
private PubMedToImport() { }
public static void main(String[] args) {
public static void main(String args[]) {
Options options = new Options();
options.addOption(new Option("s", "source", true, "Source xml"));

View File

@@ -556,7 +556,7 @@ public class SolrImportExport {
YearMonth monthStartDate;
String monthStart = monthFacet.getValue();
try {
monthStartDate = YearMonth.parse(monthStart, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
monthStartDate = YearMonth.parse(monthStart);
} catch (DateTimeParseException e) {
throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart, e);
}

Some files were not shown because too many files have changed in this diff Show More