Mirror of https://github.com/DSpace/DSpace.git

Commit: Merge branch 'main' into 3190
@@ -54,7 +54,7 @@ services:
   dspacesolr:
     container_name: dspacesolr
     # Uses official Solr image at https://hub.docker.com/_/solr/
-    image: solr:8.7
+    image: solr:8.8
     networks:
       dspacenet:
     ports:
@@ -466,6 +466,10 @@
         <artifactId>javax.servlet-api</artifactId>
         <scope>provided</scope>
     </dependency>
+    <dependency>
+        <groupId>javax.annotation</groupId>
+        <artifactId>javax.annotation-api</artifactId>
+    </dependency>
    <dependency>
        <groupId>jaxen</groupId>
        <artifactId>jaxen</artifactId>
@@ -529,8 +533,15 @@
    </dependency>
    <dependency>
        <groupId>org.mockito</groupId>
-       <artifactId>mockito-core</artifactId>
+       <artifactId>mockito-inline</artifactId>
        <scope>test</scope>
+       <exclusions>
+           <!-- Different version provided by hibernate-ehcache -->
+           <exclusion>
+               <groupId>net.bytebuddy</groupId>
+               <artifactId>byte-buddy</artifactId>
+           </exclusion>
+       </exclusions>
    </dependency>
    <dependency>
        <groupId>org.springframework</groupId>
@@ -544,19 +555,9 @@
        <version>1.0</version>
    </dependency>
    <dependency>
-       <groupId>gr.ekt.bte</groupId>
-       <artifactId>bte-io</artifactId>
-       <version>0.9.3.5</version>
-       <exclusions>
-           <exclusion>
-               <groupId>net.bytebuddy</groupId>
-               <artifactId>byte-buddy</artifactId>
-           </exclusion>
-           <exclusion>
-               <groupId>log4j</groupId>
-               <artifactId>log4j</artifactId>
-           </exclusion>
-       </exclusions>
+       <groupId>org.jbibtex</groupId>
+       <artifactId>jbibtex</artifactId>
+       <version>1.0.10</version>
    </dependency>
    <dependency>
        <groupId>org.apache.httpcomponents</groupId>
New file: org/dspace/app/bulkedit/MetadataDeletion.java
@@ -0,0 +1,115 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.bulkedit;
+
+import java.sql.SQLException;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang3.ArrayUtils;
+import org.dspace.content.MetadataField;
+import org.dspace.content.factory.ContentServiceFactory;
+import org.dspace.content.service.MetadataFieldService;
+import org.dspace.content.service.MetadataValueService;
+import org.dspace.core.Context;
+import org.dspace.scripts.DSpaceRunnable;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
+import org.dspace.utils.DSpace;
+
+/**
+ * {@link DSpaceRunnable} implementation to delete all the values of the given
+ * metadata field.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+public class MetadataDeletion extends DSpaceRunnable<MetadataDeletionScriptConfiguration<MetadataDeletion>> {
+
+    private MetadataValueService metadataValueService;
+
+    private MetadataFieldService metadataFieldService;
+
+    private ConfigurationService configurationService;
+
+    private String metadataField;
+
+    private boolean list;
+
+    @Override
+    public void internalRun() throws Exception {
+
+        if (list) {
+            listErasableMetadata();
+            return;
+        }
+
+        Context context = new Context();
+
+        try {
+            context.turnOffAuthorisationSystem();
+            performMetadataValuesDeletion(context);
+        } finally {
+            context.restoreAuthSystemState();
+            context.complete();
+        }
+
+    }
+
+    private void listErasableMetadata() {
+        String[] erasableMetadata = getErasableMetadata();
+        if (ArrayUtils.isEmpty(erasableMetadata)) {
+            handler.logInfo("No fields has been configured to be cleared via bulk deletion");
+        } else {
+            handler.logInfo("The fields that can be bulk deleted are: " + String.join(", ", erasableMetadata));
+        }
+    }
+
+    private void performMetadataValuesDeletion(Context context) throws SQLException {
+
+        MetadataField field = metadataFieldService.findByString(context, metadataField, '.');
+        if (field == null) {
+            throw new IllegalArgumentException("No metadata field found with name " + metadataField);
+        }
+
+        if (!ArrayUtils.contains(getErasableMetadata(), metadataField)) {
+            throw new IllegalArgumentException("The given metadata field cannot be bulk deleted");
+        }
+
+        handler.logInfo(String.format("Deleting the field '%s' from all objects", metadataField));
+
+        metadataValueService.deleteByMetadataField(context, field);
+    }
+
+    private String[] getErasableMetadata() {
+        return configurationService.getArrayProperty("bulkedit.allow-bulk-deletion");
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public MetadataDeletionScriptConfiguration<MetadataDeletion> getScriptConfiguration() {
+        return new DSpace().getServiceManager()
+            .getServiceByName("metadata-deletion", MetadataDeletionScriptConfiguration.class);
+    }
+
+    @Override
+    public void setup() throws ParseException {
+
+        metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
+        metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
+        configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
+
+        metadataField = commandLine.getOptionValue('m');
+        list = commandLine.hasOption('l');
+
+        if (!list && metadataField == null) {
+            throw new ParseException("One of the following parameters is required: -m or -l");
+        }
+
+    }
+
+}
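Note: a minimal usage sketch for the script above, assuming it is registered under the service name "metadata-deletion" referenced in getScriptConfiguration() and exposed through the standard DSpace launcher (both assumptions; only the -m/-l options and the configuration key appear in this diff). The field name is purely illustrative:

    # list the fields that may be bulk deleted
    [dspace]/bin/dspace metadata-deletion -l
    # delete every value of one configured field
    [dspace]/bin/dspace metadata-deletion -m dc.description.provenance

    # local.cfg (illustrative): fields allowed for bulk deletion,
    # read by getErasableMetadata() via bulkedit.allow-bulk-deletion
    bulkedit.allow-bulk-deletion = dc.description.provenance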
New file: org/dspace/app/bulkedit/MetadataDeletionCli.java
@@ -0,0 +1,18 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.bulkedit;
+
+/**
+ * The {@link MetadataDeletion} for CLI.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+public class MetadataDeletionCli extends MetadataDeletion {
+
+}
New file: org/dspace/app/bulkedit/MetadataDeletionCliScriptConfiguration.java
@@ -0,0 +1,18 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.bulkedit;
+
+/**
+ * Script configuration for {@link MetadataDeletionCli}.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+public class MetadataDeletionCliScriptConfiguration extends MetadataDeletionScriptConfiguration<MetadataDeletionCli> {
+
+}
New file: org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java
@@ -0,0 +1,68 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.bulkedit;
+
+import java.sql.SQLException;
+
+import org.apache.commons.cli.Options;
+import org.dspace.authorize.service.AuthorizeService;
+import org.dspace.core.Context;
+import org.dspace.scripts.configuration.ScriptConfiguration;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
+ */
+public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {
+
+    @Autowired
+    private AuthorizeService authorizeService;
+
+    private Class<T> dspaceRunnableClass;
+
+    @Override
+    public boolean isAllowedToExecute(Context context) {
+        try {
+            return authorizeService.isAdmin(context);
+        } catch (SQLException e) {
+            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
+        }
+    }
+
+    @Override
+    public Options getOptions() {
+        if (options == null) {
+
+            Options options = new Options();
+
+            options.addOption("m", "metadata", true, "metadata field name");
+            options.getOption("m").setType(String.class);
+
+            options.addOption("l", "list", false, "lists the metadata fields that can be deleted");
+            options.getOption("l").setType(boolean.class);
+
+            super.options = options;
+        }
+        return options;
+    }
+
+    @Override
+    public Class<T> getDspaceRunnableClass() {
+        return dspaceRunnableClass;
+    }
+
+    /**
+     * Generic setter for the dspaceRunnableClass
+     * @param dspaceRunnableClass The dspaceRunnableClass to be set on this MetadataDeletionScriptConfiguration
+     */
+    @Override
+    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
+        this.dspaceRunnableClass = dspaceRunnableClass;
+    }
+
+}
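Note: the runnable looks this configuration up by name via getServiceByName("metadata-deletion", ...). A hedged sketch of how such a bean could be wired; the Spring file location and exact bean layout are assumptions, only the bean name, the classes, and the dspaceRunnableClass setter come from this diff:

    <!-- e.g. in the DSpace scripts Spring configuration (assumed location) -->
    <bean id="metadata-deletion"
          class="org.dspace.app.bulkedit.MetadataDeletionScriptConfiguration">
        <property name="dspaceRunnableClass"
                  value="org.dspace.app.bulkedit.MetadataDeletion"/>
    </bean>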
@@ -890,10 +890,10 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
         Entity relationEntity = getEntity(c, value);
         // Get relationship type of entity and item
         String relationEntityRelationshipType = itemService.getMetadata(relationEntity.getItem(),
-                "relationship", "type",
-                null, Item.ANY).get(0).getValue();
-        String itemRelationshipType = itemService.getMetadata(item, "relationship", "type",
-                null, Item.ANY).get(0).getValue();
+                "dspace", "entity",
+                "type", Item.ANY).get(0).getValue();
+        String itemRelationshipType = itemService.getMetadata(item, "dspace", "entity",
+                "type", Item.ANY).get(0).getValue();

         // Get the correct RelationshipType based on typeName
         List<RelationshipType> relType = relationshipTypeService.findByLeftwardOrRightwardTypeName(c, typeName);
@@ -1487,7 +1487,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
                 }
             }
             //Populate entityTypeMap
-            if (key.equalsIgnoreCase("relationship.type") && line.get(key).size() > 0) {
+            if (key.equalsIgnoreCase("dspace.entity.type") && line.get(key).size() > 0) {
                 if (uuid == null) {
                     entityTypeMap.put(new UUID(0, rowCount), line.get(key).get(0));
                 } else {
@@ -1651,8 +1651,8 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
             if (itemService.find(c, UUID.fromString(targetUUID)) != null) {
                 targetItem = itemService.find(c, UUID.fromString(targetUUID));
                 List<MetadataValue> relTypes = itemService.
-                    getMetadata(targetItem, "relationship", "type",
-                                null, Item.ANY);
+                    getMetadata(targetItem, "dspace", "entity",
+                                "type", Item.ANY);
                 String relTypeValue = null;
                 if (relTypes.size() > 0) {
                     relTypeValue = relTypes.get(0).getValue();
@@ -1696,9 +1696,9 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
             if (itemService.find(c, UUID.fromString(targetUUID)) != null) {
                 DSpaceCSVLine dSpaceCSVLine = this.csv.getCSVLines()
                                                   .get(Integer.valueOf(originRow) - 1);
-                List<String> relTypes = dSpaceCSVLine.get("relationship.type");
+                List<String> relTypes = dSpaceCSVLine.get("dspace.entity.type");
                 if (relTypes == null || relTypes.isEmpty()) {
-                    dSpaceCSVLine.get("relationship.type[]");
+                    dSpaceCSVLine.get("dspace.entity.type[]");
                 }

                 if (relTypes != null && relTypes.size() > 0) {
@@ -1710,8 +1710,8 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
                     originItem = itemService.find(c, UUID.fromString(originRefererUUID));
                     if (originItem != null) {
                         List<MetadataValue> mdv = itemService.getMetadata(originItem,
-                                                                          "relationship",
-                                                                          "type", null,
+                                                                          "dspace",
+                                                                          "entity", "type",
                                                                           Item.ANY);
                         if (!mdv.isEmpty()) {
                             String relTypeValue = mdv.get(0).getValue();
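Note: with the rename from relationship.type to dspace.entity.type, a metadata-import CSV that declares entity types would carry the new column name. An illustrative fragment only (the column set, handle, and values are placeholders, not taken from this diff):

    id,collection,dc.title,dspace.entity.type
    +,123456789/2,Example publication,Publication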
Deleted file: org/dspace/app/itemimport/BTEBatchImportService.java
@@ -1,106 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-package org.dspace.app.itemimport;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import gr.ekt.bte.core.DataLoader;
-import gr.ekt.bte.core.TransformationEngine;
-import gr.ekt.bte.dataloader.FileDataLoader;
-
-
-/**
- * This class acts as a Service in the procedure to batch import using the Biblio-Transformation-Engine
- */
-public class BTEBatchImportService {
-
-    TransformationEngine transformationEngine;
-    Map<String, DataLoader> dataLoaders = new HashMap<String, DataLoader>();
-    Map<String, String> outputMap = new HashMap<String, String>();
-
-    /**
-     * Default constructor
-     */
-    public BTEBatchImportService() {
-        super();
-    }
-
-    /**
-     * Setter method for dataLoaders parameter
-     *
-     * @param dataLoaders map of data loaders
-     */
-    public void setDataLoaders(Map<String, DataLoader> dataLoaders) {
-        this.dataLoaders = dataLoaders;
-    }
-
-    /**
-     * Get data loaders
-     *
-     * @return the map of DataLoaders
-     */
-    public Map<String, DataLoader> getDataLoaders() {
-        return dataLoaders;
-    }
-
-    /**
-     * Get output map
-     *
-     * @return the outputMapping
-     */
-    public Map<String, String> getOutputMap() {
-        return outputMap;
-    }
-
-    /**
-     * Setter method for the outputMapping
-     *
-     * @param outputMap the output mapping
-     */
-    public void setOutputMap(Map<String, String> outputMap) {
-        this.outputMap = outputMap;
-    }
-
-    /**
-     * Get transformation engine
-     *
-     * @return transformation engine
-     */
-    public TransformationEngine getTransformationEngine() {
-        return transformationEngine;
-    }
-
-    /**
-     * set transformation engine
-     *
-     * @param transformationEngine transformation engine
-     */
-    public void setTransformationEngine(TransformationEngine transformationEngine) {
-        this.transformationEngine = transformationEngine;
-    }
-
-    /**
-     * Getter of file data loaders
-     *
-     * @return List of file data loaders
-     */
-    public List<String> getFileDataLoaders() {
-        List<String> result = new ArrayList<String>();
-
-        for (String key : dataLoaders.keySet()) {
-            DataLoader dl = dataLoaders.get(key);
-            if (dl instanceof FileDataLoader) {
-                result.add(key);
-            }
-        }
-        return result;
-    }
-}
@@ -73,7 +73,6 @@ public class ItemImportCLITool {
         Options options = new Options();

         options.addOption("a", "add", false, "add items to DSpace");
-        options.addOption("b", "add-bte", false, "add items to DSpace via Biblio-Transformation-Engine (BTE)");
         options.addOption("r", "replace", false, "replace items in mapfile");
         options.addOption("d", "delete", false,
                           "delete items listed in mapfile");
@@ -388,8 +387,6 @@ public class ItemImportCLITool {
                 myloader.replaceItems(c, mycollections, sourcedir, mapfile, template);
             } else if ("delete".equals(command)) {
                 myloader.deleteItems(c, mapfile);
-            } else if ("add-bte".equals(command)) {
-                myloader.addBTEItems(c, mycollections, sourcedir, mapfile, template, bteInputType, null);
             }

             // complete all transactions
@@ -45,13 +45,6 @@ import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.TransformerException;

-import gr.ekt.bte.core.DataLoader;
-import gr.ekt.bte.core.TransformationEngine;
-import gr.ekt.bte.core.TransformationResult;
-import gr.ekt.bte.core.TransformationSpec;
-import gr.ekt.bte.dataloader.FileDataLoader;
-import gr.ekt.bteio.generators.DSpaceOutputGenerator;
-import gr.ekt.bteio.loaders.OAIPMHDataLoader;
 import org.apache.commons.collections4.ComparatorUtils;
 import org.apache.commons.io.FileDeleteStrategy;
 import org.apache.commons.io.FileUtils;
@@ -96,7 +89,6 @@ import org.dspace.eperson.service.EPersonService;
 import org.dspace.eperson.service.GroupService;
 import org.dspace.handle.service.HandleService;
 import org.dspace.services.ConfigurationService;
-import org.dspace.utils.DSpace;
 import org.dspace.workflow.WorkflowItem;
 import org.dspace.workflow.WorkflowService;
 import org.springframework.beans.factory.InitializingBean;
@@ -200,100 +192,6 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     }


-    /**
-     * In this method, the BTE is instantiated. THe workflow generates the DSpace files
-     * necessary for the upload, and the default item import method is called
-     *
-     * @param c             The contect
-     * @param mycollections The collections the items are inserted to
-     * @param sourceDir     The filepath to the file to read data from
-     * @param mapFile       The filepath to mapfile to be generated
-     * @param template      whether to use collection template item as starting point
-     * @param inputType     The type of the input data (bibtex, csv, etc.)
-     * @param workingDir    The path to create temporary files (for command line or UI based)
-     * @throws Exception if error occurs
-     */
-    @Override
-    public void addBTEItems(Context c, List<Collection> mycollections,
-                            String sourceDir, String mapFile, boolean template, String inputType, String workingDir)
-        throws Exception {
-        //Determine the folder where BTE will output the results
-        String outputFolder = null;
-        if (workingDir == null) { //This indicates a command line import, create a random path
-            File importDir = new File(configurationService.getProperty("org.dspace.app.batchitemimport.work.dir"));
-            if (!importDir.exists()) {
-                boolean success = importDir.mkdir();
-                if (!success) {
-                    log.info("Cannot create batch import directory!");
-                    throw new Exception("Cannot create batch import directory!");
-                }
-            }
-            //Get a random folder in case two admins batch import data at the same time
-            outputFolder = importDir + File.separator + generateRandomFilename(true);
-        } else { //This indicates a UI import, working dir is preconfigured
-            outputFolder = workingDir;
-        }
-
-        BTEBatchImportService dls = new DSpace().getSingletonService(BTEBatchImportService.class);
-        DataLoader dataLoader = dls.getDataLoaders().get(inputType);
-        Map<String, String> outputMap = dls.getOutputMap();
-        TransformationEngine te = dls.getTransformationEngine();
-
-        if (dataLoader == null) {
-            System.out.println(
-                "ERROR: The key used in -i parameter must match a valid DataLoader in the BTE Spring XML " +
-                    "configuration file!");
-            return;
-        }
-
-        if (outputMap == null) {
-            System.out.println(
-                "ERROR: The key used in -i parameter must match a valid outputMapping in the BTE Spring XML " +
-                    "configuration file!");
-            return;
-        }
-
-        if (dataLoader instanceof FileDataLoader) {
-            FileDataLoader fdl = (FileDataLoader) dataLoader;
-            if (!StringUtils.isBlank(sourceDir)) {
-                System.out.println(
-                    "INFO: Dataloader will load data from the file specified in the command prompt (and not from the " +
-                        "Spring XML configuration file)");
-                fdl.setFilename(sourceDir);
-            }
-        } else if (dataLoader instanceof OAIPMHDataLoader) {
-            OAIPMHDataLoader fdl = (OAIPMHDataLoader) dataLoader;
-            System.out.println(sourceDir);
-            if (!StringUtils.isBlank(sourceDir)) {
-                System.out.println(
-                    "INFO: Dataloader will load data from the address specified in the command prompt (and not from " +
-                        "the Spring XML configuration file)");
-                fdl.setServerAddress(sourceDir);
-            }
-        }
-        if (dataLoader != null) {
-            System.out.println("INFO: Dataloader " + dataLoader.toString() + " will be used for the import!");
-
-            te.setDataLoader(dataLoader);
-
-            DSpaceOutputGenerator outputGenerator = new DSpaceOutputGenerator(outputMap);
-            outputGenerator.setOutputDirectory(outputFolder);
-
-            te.setOutputGenerator(outputGenerator);
-
-            try {
-                TransformationResult res = te.transform(new TransformationSpec());
-                List<String> output = res.getOutput();
-                outputGenerator.writeOutput(output);
-            } catch (Exception e) {
-                System.err.println("Exception");
-                e.printStackTrace();
-                throw e;
-            }
-            addItems(c, mycollections, outputFolder, mapFile, template);
-        }
-    }
-
     @Override
     public void addItemsAtomic(Context c, List<Collection> mycollections, String sourceDir, String mapFile,
                                boolean template) throws Exception {
@@ -1739,9 +1637,6 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
                     if (theInputType.equals("saf") || theInputType
                         .equals("safupload")) { //In case of Simple Archive Format import
                         addItems(context, finalCollections, dataDir, mapFilePath, template);
-                    } else { // For all other imports (via BTE)
-                        addBTEItems(context, finalCollections, theFilePath, mapFilePath, useTemplateItem, theInputType,
-                                    dataDir);
                     }

                     // email message letting user know the file is ready for
@@ -183,21 +183,6 @@ public interface ItemImportService {
     */
    public void deleteItems(Context c, String mapfile) throws Exception;

-    /**
-     * Add items
-     *
-     * @param c             DSpace Context
-     * @param mycollections List of Collections
-     * @param sourcedir     source directory
-     * @param mapfile       map file
-     * @param template      whether to use template item
-     * @param bteInputType  The input type of the data (bibtex, csv, etc.), in case of local file
-     * @param workingDir    working directory
-     * @throws Exception if error
-     */
-    public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile,
-                            boolean template, String bteInputType, String workingDir) throws Exception;
-
    /**
     * Get temporary work directory
     *
@@ -12,6 +12,7 @@ import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
+import javax.annotation.PostConstruct;

 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -23,12 +24,12 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.utils.URIBuilder;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
 import org.dspace.app.sherpa.v2.SHERPAResponse;
 import org.dspace.app.sherpa.v2.SHERPAUtils;
 import org.dspace.services.ConfigurationService;
-import org.dspace.services.factory.DSpaceServicesFactory;
 import org.springframework.beans.factory.annotation.Autowired;

 /**
@@ -47,11 +48,11 @@ public class SHERPAService {
     private int maxNumberOfTries;
     private long sleepBetweenTimeouts;
     private int timeout = 5000;
-    private String endpoint = "https://v2.sherpa.ac.uk/cgi/retrieve";
+    private String endpoint = null;
     private String apiKey = null;

     /** log4j category */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAService.class);
+    private static final Logger log = LogManager.getLogger(SHERPAService.class);

     @Autowired
     ConfigurationService configurationService;
@@ -60,14 +61,6 @@ public class SHERPAService {
      * Create a new HTTP builder with sensible defaults in constructor
      */
     public SHERPAService() {
-        // Set configuration service
-        configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
-
-        // Get endoint and API key from configuration
-        endpoint = configurationService.getProperty("sherpa.romeo.url",
-            "https://v2.sherpa.ac.uk/cgi/retrieve");
-        apiKey = configurationService.getProperty("sherpa.romeo.apikey");
-
         HttpClientBuilder builder = HttpClientBuilder.create();
         // httpclient 4.3+ doesn't appear to have any sensible defaults any more. Setting conservative defaults as
         // not to hammer the SHERPA service too much.
@@ -77,6 +70,17 @@ public class SHERPAService {
             .build();
     }

+    /**
+     * Complete initialization of the Bean.
+     */
+    @PostConstruct
+    private void init() {
+        // Get endoint and API key from configuration
+        endpoint = configurationService.getProperty("sherpa.romeo.url",
+            "https://v2.sherpa.ac.uk/cgi/retrieve");
+        apiKey = configurationService.getProperty("sherpa.romeo.apikey");
+    }
+
     /**
      * Search the SHERPA v2 API for a journal policy data using the supplied ISSN.
      * If the API key is missing, or the HTTP response is non-OK or does not complete
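Note: the endpoint and API key are now read in init() after the autowired ConfigurationService is injected, instead of in the constructor. The property names below come from that code; the file name and key value are placeholders:

    # local.cfg (illustrative)
    sherpa.romeo.url = https://v2.sherpa.ac.uk/cgi/retrieve
    sherpa.romeo.apikey = <your-api-key>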
@@ -30,9 +30,6 @@ public class SHERPAPermittedVersion {
     // Version (submitted, accepted, published)
     private String articleVersion;

-    // Version label
-    private String articleVersionLabel;
-
     // Option number
     private int option;

@@ -108,11 +105,4 @@ public class SHERPAPermittedVersion {
         this.option = option;
     }

-    public String getArticleVersionLabel() {
-        return articleVersionLabel;
-    }
-
-    public void setArticleVersionLabel(String articleVersionLabel) {
-        this.articleVersionLabel = articleVersionLabel;
-    }
 }
@@ -91,7 +91,7 @@ public class SHERPAPublisherResponse {
             for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
                 SHERPAPublisher sherpaPublisher = new SHERPAPublisher();

-                JSONObject item = items.getJSONObject(0);
+                JSONObject item = items.getJSONObject(itemIndex);

                 // Parse system metadata (per-item / result information)
                 if (item.has("system_metadata")) {
@@ -18,7 +18,6 @@ import java.util.TreeMap;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.dspace.core.I18nUtil;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -99,7 +98,7 @@ public class SHERPAResponse {
                 SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
                 SHERPAJournal sherpaJournal = new SHERPAJournal();

-                JSONObject item = items.getJSONObject(0);
+                JSONObject item = items.getJSONObject(itemIndex);

                 // Parse system metadata (per-item / result information)
                 if (item.has("system_metadata")) {
@@ -417,17 +416,6 @@ public class SHERPAResponse {
                 log.debug("Added allowed version: " + articleVersion + " to list");
             }

-            // Add labels for this particular article version
-            if ("submitted".equals(articleVersion)) {
-                versionLabel = I18nUtil.getMessage("jsp.sherpa.submitted-version-label");
-            } else if ("accepted".equals(articleVersion)) {
-                versionLabel = I18nUtil.getMessage("jsp.sherpa.accepted-version-label");
-            } else if ("published".equals(articleVersion)) {
-                versionLabel = I18nUtil.getMessage("jsp.sherpa.published-version-label");
-            }
-            // Set the article version label based on the i18n text set above
-            permittedVersion.setArticleVersionLabel(versionLabel);
-
             // These are now child arrays, in old API they were explicit like
             // "preprint restrictions", etc., and just contained text rather than data
             if (permitted.has("conditions")) {
@@ -141,6 +141,15 @@ public class LDAPAuthentication
         // Prevents anonymous users from being added to this group, and the second check
         // ensures they are LDAP users
         try {
+            // without a logged in user, this method should return an empty list
+            if (context.getCurrentUser() == null) {
+                return Collections.EMPTY_LIST;
+            }
+            // if the logged in user does not have a netid, it's not an LDAP user
+            // and this method should return an empty list
+            if (context.getCurrentUser().getNetid() == null) {
+                return Collections.EMPTY_LIST;
+            }
             if (!context.getCurrentUser().getNetid().equals("")) {
                 String groupName = configurationService.getProperty("authentication-ldap.login.specialgroup");
                 if ((groupName != null) && (!groupName.trim().equals(""))) {
@@ -681,7 +690,7 @@ public class LDAPAuthentication
         if (StringUtils.isNotBlank(dn)) {
             System.out.println("dn:" + dn);
             int i = 1;
-            String groupMap = configurationService.getProperty("authentication-ldap", "login.groupmap." + i);
+            String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i);

             boolean cmp;

@@ -721,7 +730,7 @@ public class LDAPAuthentication
                     }
                 }

-                groupMap = configurationService.getProperty("authentication-ldap", "login.groupmap." + ++i);
+                groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i);
             }
         }
     }
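Note: both lookups now use a single dotted property key instead of the two-argument (module, key) form. An illustrative configuration fragment using those keys; the group names are placeholders, and the "ldap-value:dspace-group" value format shown is the conventional one for this plugin rather than something defined in this diff:

    authentication-ldap.login.specialgroup = LDAP Users
    authentication-ldap.login.groupmap.1 = ou=Staff:Staff
    authentication-ldap.login.groupmap.2 = ou=Students:Students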
New file: org/dspace/authority/orcid/Orcidv3AuthorityValue.java
@@ -0,0 +1,365 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.authority.orcid;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.commons.lang.ObjectUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrInputDocument;
+import org.dspace.authority.AuthorityValue;
+import org.dspace.authority.AuthorityValueServiceImpl;
+import org.dspace.authority.PersonAuthorityValue;
+import org.dspace.utils.DSpace;
+import org.orcid.jaxb.model.v3.release.record.Keyword;
+import org.orcid.jaxb.model.v3.release.record.Name;
+import org.orcid.jaxb.model.v3.release.record.Person;
+import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifier;
+import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifiers;
+import org.orcid.jaxb.model.v3.release.record.ResearcherUrl;
+
+/**
+ * An {@link AuthorityValue} encapsulating information retrieved from ORCID
+ *
+ * @author Jonas Van Goolen (jonas at atmire dot com)
+ */
+public class Orcidv3AuthorityValue extends PersonAuthorityValue {
+
+    /*
+     * The ORCID identifier
+     */
+    private String orcid_id;
+
+    /*
+     * Map containing key-value pairs filled in by "setValues(Person person)".
+     * This represents all dynamic information of the object.
+     */
+    private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
+
+    /**
+     * The syntax that the ORCID id needs to conform to
+     */
+    public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";
+
+
+    /**
+     * Creates an instance of Orcidv3AuthorityValue with only uninitialized fields.
+     * This is meant to be filled in with values from an existing record.
+     * To create a brand new Orcidv3AuthorityValue, use create()
+     */
+    public Orcidv3AuthorityValue() {
+    }
+
+    public Orcidv3AuthorityValue(SolrDocument document) {
+        super(document);
+    }
+
+
+    public String getOrcid_id() {
+        return orcid_id;
+    }
+
+    public void setOrcid_id(String orcid_id) {
+        this.orcid_id = orcid_id;
+    }
+
+    /**
+     * Create an empty authority.
+     * @return OrcidAuthorityValue
+     */
+    public static Orcidv3AuthorityValue create() {
+        Orcidv3AuthorityValue orcidAuthorityValue = new Orcidv3AuthorityValue();
+        orcidAuthorityValue.setId(UUID.randomUUID().toString());
+        orcidAuthorityValue.updateLastModifiedDate();
+        orcidAuthorityValue.setCreationDate(new Date());
+        return orcidAuthorityValue;
+    }
+
+    /**
+     * Create an authority based on a given orcid bio
+     * @return OrcidAuthorityValue
+     */
+    public static Orcidv3AuthorityValue create(Person person) {
+        if (person == null) {
+            return null;
+        }
+        Orcidv3AuthorityValue authority = Orcidv3AuthorityValue.create();
+
+        authority.setValues(person);
+
+        return authority;
+    }
+
+    /**
+     * Initialize this instance based on a Person object
+     * @param person Person
+     */
+    protected void setValues(Person person) {
+        Name name = person.getName();
+
+        if (!StringUtils.equals(name.getPath(), this.getOrcid_id())) {
+            this.setOrcid_id(name.getPath());
+        }
+
+        if (!StringUtils.equals(name.getFamilyName().getContent(), this.getLastName())) {
+            this.setLastName(name.getFamilyName().getContent());
+        }
+
+        if (!StringUtils.equals(name.getGivenNames().getContent(), this.getFirstName())) {
+            this.setFirstName(name.getGivenNames().getContent());
+        }
+
+        if (name.getCreditName() != null && StringUtils.isNotBlank(name.getCreditName().getContent())) {
+            if (!this.getNameVariants().contains(name.getCreditName().getContent())) {
+                this.addNameVariant(name.getCreditName().getContent());
+            }
+        }
+
+        if (person.getKeywords() != null) {
+            for (Keyword keyword : person.getKeywords().getKeywords()) {
+                if (this.isNewMetadata("keyword", keyword.getContent())) {
+                    this.addOtherMetadata("keyword", keyword.getContent());
+                }
+            }
+        }
+
+        PersonExternalIdentifiers externalIdentifiers = person.getExternalIdentifiers();
+        if (externalIdentifiers != null) {
+            for (PersonExternalIdentifier externalIdentifier : externalIdentifiers.getExternalIdentifiers()) {
+                if (this.isNewMetadata("external_identifier", externalIdentifier.getValue())) {
+                    this.addOtherMetadata("external_identifier", externalIdentifier.getValue());
+                }
+            }
+        }
+        if (person.getResearcherUrls() != null) {
+            for (ResearcherUrl researcherUrl : person.getResearcherUrls().getResearcherUrls()) {
+                if (this.isNewMetadata("researcher_url", researcherUrl.getUrl().getValue())) {
+                    this.addOtherMetadata("researcher_url", researcherUrl.getUrl().getValue());
+                }
+            }
+
+        }
+        if (person.getBiography() != null) {
+            if (this.isNewMetadata("biography", person.getBiography().getContent())) {
+                this.addOtherMetadata("biography", person.getBiography().getContent());
+            }
+        }
+
+        this.setValue(this.getName());
+
+    }
+
+    /**
+     * Makes an instance of the AuthorityValue with the given information.
+     * @param info string info
+     * @return AuthorityValue
+     */
+    @Override
+    public AuthorityValue newInstance(String info) {
+        AuthorityValue authorityValue = null;
+        if (StringUtils.isNotBlank(info)) {
+            Orcidv3SolrAuthorityImpl orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource",
+                    Orcidv3SolrAuthorityImpl.class);
+            authorityValue = orcid.queryAuthorityID(info);
+        } else {
+            authorityValue = this.create();
+        }
+        return authorityValue;
+    }
+
+    @Override
+    public void setValue(String value) {
+        super.setValue(value);
+    }
+
+    /**
+     * Check to see if the provided label / data pair is already present in the "otherMetadata" or not
+     * */
+    public boolean isNewMetadata(String label, String data) {
+        List<String> strings = getOtherMetadata().get(label);
+        boolean update;
+        if (strings == null) {
+            update = StringUtils.isNotBlank(data);
+        } else {
+            update = !strings.contains(data);
+        }
+        return update;
+    }
+
+    /**
+     * Add additional metadata to the otherMetadata map*/
+    public void addOtherMetadata(String label, String data) {
+        List<String> strings = otherMetadata.get(label);
+        if (strings == null) {
+            strings = new ArrayList<>();
+        }
+        strings.add(data);
+        otherMetadata.put(label, strings);
+    }
+
+    public Map<String, List<String>> getOtherMetadata() {
+        return otherMetadata;
+    }
+
+
+    /**
+     * Generate a solr record from this instance
+     * @return SolrInputDocument
+     */
+    @Override
+    public SolrInputDocument getSolrInputDocument() {
+        SolrInputDocument doc = super.getSolrInputDocument();
+        if (StringUtils.isNotBlank(getOrcid_id())) {
+            doc.addField("orcid_id", getOrcid_id());
+        }
+
+        for (String t : otherMetadata.keySet()) {
+            List<String> data = otherMetadata.get(t);
+            for (String data_entry : data) {
+                doc.addField("label_" + t, data_entry);
+            }
+        }
+        return doc;
+    }
+
+    /**
+     * Information that can be used the choice ui
+     * @return map
+     */
+    @Override
+    public Map<String, String> choiceSelectMap() {
+
+        Map<String, String> map = super.choiceSelectMap();
+
+        String orcid_id = getOrcid_id();
+        if (StringUtils.isNotBlank(orcid_id)) {
+            map.put("orcid", orcid_id);
+        }
+
+        return map;
+    }
+
+    @Override
+    public String getAuthorityType() {
+        return "orcid";
+    }
+
+    /**
+     * Provides a string that will allow this AuthorityType to be recognized and
+     * provides information to create a new instance to be created using public
+     * Orcidv3AuthorityValue newInstance(String info).
+     *
+     * @return see
+     * {@link org.dspace.authority.service.AuthorityValueService#GENERATE
+     * AuthorityValueService.GENERATE}
+     */
+    @Override
+    public String generateString() {
+        String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType()
+            + AuthorityValueServiceImpl.SPLIT;
+        if (StringUtils.isNotBlank(getOrcid_id())) {
+            generateString += getOrcid_id();
+        }
+        return generateString;
+    }
+
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+
+        Orcidv3AuthorityValue that = (Orcidv3AuthorityValue) o;
+
+        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
+            return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        return orcid_id != null ? orcid_id.hashCode() : 0;
+    }
+
+    /**
+     * The regular equals() only checks if both AuthorityValues describe the same authority.
+     * This method checks if the AuthorityValues have different information
+     * E.g. it is used to decide when lastModified should be updated.
+     * @param o object
+     * @return true or false
+     */
+    @Override
+    public boolean hasTheSameInformationAs(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        if (!super.hasTheSameInformationAs(o)) {
+            return false;
+        }
+
+        Orcidv3AuthorityValue that = (Orcidv3AuthorityValue) o;
+
+        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
+            return false;
+        }
+
+        for (String key : otherMetadata.keySet()) {
+            if (otherMetadata.get(key) != null) {
+                List<String> metadata = otherMetadata.get(key);
+                List<String> otherMetadata = that.otherMetadata.get(key);
+                if (otherMetadata == null) {
+                    return false;
+                } else {
+                    HashSet<String> metadataSet = new HashSet<String>(metadata);
+                    HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
+                    if (!metadataSet.equals(otherMetadataSet)) {
+                        return false;
+                    }
+                }
+            } else {
+                if (that.otherMetadata.get(key) != null) {
+                    return false;
+                }
+            }
+        }
+
+        return true;
+    }
+
+    @Override
+    public void setValues(SolrDocument document) {
+        super.setValues(document);
+        this.orcid_id = ObjectUtils.toString(document.getFieldValue("orcid_id"));
+        for (String key : document.getFieldNames()) {
+            if (key.startsWith("label_")) {
+                String keyInternalMap = key.substring(key.indexOf("_") + 1);
+                Collection<Object> valuesSolr = document.getFieldValues(key);
+                for (Object valueInternal : valuesSolr) {
+                    addOtherMetadata(keyInternalMap, (String) valueInternal);
+                }
+            }
+        }
+    }
+}
New file: org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java
@@ -0,0 +1,217 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.authority.orcid;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.log4j.Logger;
+import org.dspace.authority.AuthorityValue;
+import org.dspace.authority.SolrAuthorityInterface;
+import org.dspace.external.OrcidRestConnector;
+import org.dspace.external.provider.orcid.xml.XMLtoBio;
+import org.json.JSONObject;
+import org.orcid.jaxb.model.v3.release.common.OrcidIdentifier;
+import org.orcid.jaxb.model.v3.release.record.Person;
+import org.orcid.jaxb.model.v3.release.search.Result;
+
+/**
+ * This class contains all methods for retrieving "Person" objects calling the ORCID (version 3) endpoints.
+ * Additionally, this can also create AuthorityValues based on these returned Person objects
+ *
+ * @author Jonas Van Goolen (jonas at atmire dot com)
+ * @author Andrea Bollini (andrea.bollini at 4science.it)
+ */
+public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface {
+
+    private static Logger log = Logger.getLogger(Orcidv3SolrAuthorityImpl.class);
+
+    private OrcidRestConnector orcidRestConnector;
+    private String OAUTHUrl;
+    private String clientId;
+    private String clientSecret;
+
+    private String accessToken;
+
+    public void setOAUTHUrl(String oAUTHUrl) {
+        OAUTHUrl = oAUTHUrl;
+    }
+
+    public void setClientId(String clientId) {
+        this.clientId = clientId;
+    }
+
+    public void setClientSecret(String clientSecret) {
+        this.clientSecret = clientSecret;
+    }
+
+    /**
+     * Initialize the accessToken that is required for all subsequent calls to ORCID
+     */
+    public void init() {
+        if (StringUtils.isBlank(accessToken)
+                && StringUtils.isNotBlank(clientSecret)
+                && StringUtils.isNotBlank(clientId)
+                && StringUtils.isNotBlank(OAUTHUrl)) {
+            String authenticationParameters = "?client_id=" + clientId +
+                    "&client_secret=" + clientSecret +
+                    "&scope=/read-public&grant_type=client_credentials";
+            try {
+                HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
+                httpPost.addHeader("Accept", "application/json");
+                httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");
+
+                HttpClient httpClient = HttpClientBuilder.create().build();
+                HttpResponse getResponse = httpClient.execute(httpPost);
+
+                JSONObject responseObject = null;
+                try (InputStream is = getResponse.getEntity().getContent();
+                     BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
+                    String inputStr;
+                    while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
+                        if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
+                            try {
+                                responseObject = new JSONObject(inputStr);
+                            } catch (Exception e) {
+                                //Not as valid as I'd hoped, move along
+                                responseObject = null;
+                            }
+                        }
+                    }
+                }
+                if (responseObject != null && responseObject.has("access_token")) {
+                    accessToken = (String) responseObject.get("access_token");
+                }
+            } catch (Exception e) {
+                throw new RuntimeException("Error during initialization of the Orcid connector", e);
+            }
+        }
+    }
+
+    public void setOrcidRestConnector(OrcidRestConnector orcidRestConnector) {
+        this.orcidRestConnector = orcidRestConnector;
+    }
+
+    /**
+     * Makes an instance of the AuthorityValue with the given information.
+     * @param text search string
+     * @return List<AuthorityValue>
+     */
+    @Override
+    public List<AuthorityValue> queryAuthorities(String text, int max) {
+        init();
+        List<Person> bios = queryBio(text, max);
+        List<AuthorityValue> result = new ArrayList<>();
+        for (Person person : bios) {
+            AuthorityValue orcidAuthorityValue = Orcidv3AuthorityValue.create(person);
+            if (orcidAuthorityValue != null) {
+                result.add(orcidAuthorityValue);
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Create an AuthorityValue from a Person retrieved using the given orcid identifier.
+     * @param id orcid identifier
+     * @return AuthorityValue
+     */
+    @Override
+    public AuthorityValue queryAuthorityID(String id) {
+        init();
+        Person person = getBio(id);
+        AuthorityValue valueFromPerson = Orcidv3AuthorityValue.create(person);
+        return valueFromPerson;
+    }
+
+    /**
+     * Retrieve a Person object based on a given orcid identifier
+     * @param id orcid identifier
+     * @return Person
+     */
+    public Person getBio(String id) {
+        log.debug("getBio called with ID=" + id);
+        if (!isValid(id)) {
+            return null;
+        }
+        init();
+        InputStream bioDocument = orcidRestConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
+        XMLtoBio converter = new XMLtoBio();
+        Person person = converter.convertSinglePerson(bioDocument);
+        return person;
+    }
+
+
+    /**
+     * Retrieve a list of Person objects.
+     * @param text search string
+     * @param start offset to use
+     * @param rows how many rows to return
+     * @return List<Person>
+     */
+    public List<Person> queryBio(String text, int start, int rows) {
+        init();
+        if (rows > 100) {
+            throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
+        }
|
|
||||||
|
String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
|
||||||
|
log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
|
||||||
|
InputStream bioDocument = orcidRestConnector.get(searchPath, accessToken);
|
||||||
|
XMLtoBio converter = new XMLtoBio();
|
||||||
|
List<Result> results = converter.convert(bioDocument);
|
||||||
|
List<Person> bios = new LinkedList<>();
|
||||||
|
for (Result result : results) {
|
||||||
|
OrcidIdentifier orcidIdentifier = result.getOrcidIdentifier();
|
||||||
|
if (orcidIdentifier != null) {
|
||||||
|
log.debug("Found OrcidId=" + orcidIdentifier.toString());
|
||||||
|
String orcid = orcidIdentifier.getPath();
|
||||||
|
Person bio = getBio(orcid);
|
||||||
|
if (bio != null) {
|
||||||
|
bios.add(bio);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
bioDocument.close();
|
||||||
|
} catch (IOException e) {
|
||||||
|
log.error(e.getMessage(), e);
|
||||||
|
}
|
||||||
|
return bios;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve a list of Person objects.
|
||||||
|
* @param text search string
|
||||||
|
* @param max how many rows to return
|
||||||
|
* @return List<Person>
|
||||||
|
*/
|
||||||
|
public List<Person> queryBio(String text, int max) {
|
||||||
|
return queryBio(text, 0, max);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check to see if the provided text has the correct ORCID syntax. Since only
|
||||||
|
* searching on ORCID id is allowed, this way, we filter out any queries that
|
||||||
|
* would return a blank result anyway
|
||||||
|
*/
|
||||||
|
private boolean isValid(String text) {
|
||||||
|
return StringUtils.isNotBlank(text) && text.matches(Orcidv3AuthorityValue.ORCID_ID_SYNTAX);
|
||||||
|
}
|
||||||
|
}
|
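A minimal, hypothetical usage sketch of the class above (illustrative only, not part of the changeset): the connector and OAuth credentials are normally supplied through configuration/Spring, the OAuth URL is the assumed public ORCID token endpoint, and the query string is a placeholder.

    // Sketch under the assumptions stated above; uses only setters/methods shown in the class.
    public static List<AuthorityValue> lookupOrcidAuthorities(OrcidRestConnector connector,
                                                              String clientId, String clientSecret) {
        Orcidv3SolrAuthorityImpl authority = new Orcidv3SolrAuthorityImpl();
        authority.setOAUTHUrl("https://orcid.org/oauth/token"); // assumed ORCID OAuth token endpoint
        authority.setClientId(clientId);                        // placeholder credentials
        authority.setClientSecret(clientSecret);
        authority.setOrcidRestConnector(connector);             // connector pointing at the ORCID v3 public API
        // queryAuthorities() calls init() lazily, searches ORCID and wraps each Person as an AuthorityValue
        return authority.queryAuthorities("van goolen", 10);
    }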
@@ -17,6 +17,8 @@ import java.util.UUID;
 
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.dspace.authorize.service.AuthorizeService;
 import org.dspace.authorize.service.ResourcePolicyService;
 import org.dspace.content.Bitstream;
@@ -30,6 +32,13 @@ import org.dspace.content.service.BitstreamService;
 import org.dspace.content.service.WorkspaceItemService;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
+import org.dspace.discovery.DiscoverQuery;
+import org.dspace.discovery.DiscoverResult;
+import org.dspace.discovery.IndexableObject;
+import org.dspace.discovery.SearchService;
+import org.dspace.discovery.SearchServiceException;
+import org.dspace.discovery.indexobject.IndexableCollection;
+import org.dspace.discovery.indexobject.IndexableCommunity;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.service.GroupService;
@@ -52,6 +61,9 @@ import org.springframework.beans.factory.annotation.Autowired;
  * group 0, which is anonymous - all EPeople are members of group 0.
  */
 public class AuthorizeServiceImpl implements AuthorizeService {
+
+    private static Logger log = LogManager.getLogger(AuthorizeServiceImpl.class);
+
     @Autowired(required = true)
     protected BitstreamService bitstreamService;
     @Autowired(required = true)
@@ -64,6 +76,9 @@ public class AuthorizeServiceImpl implements AuthorizeService {
     protected WorkspaceItemService workspaceItemService;
     @Autowired(required = true)
     protected WorkflowItemService workflowItemService;
+    @Autowired(required = true)
+    private SearchService searchService;
+
 
     protected AuthorizeServiceImpl() {
 
@@ -428,46 +443,6 @@ public class AuthorizeServiceImpl implements AuthorizeService {
         }
     }
 
-    public boolean isCommunityAdmin(Context c) throws SQLException {
-        EPerson e = c.getCurrentUser();
-        return isCommunityAdmin(c, e);
-    }
-
-    @Override
-    public boolean isCommunityAdmin(Context c, EPerson e) throws SQLException {
-        if (e != null) {
-            List<ResourcePolicy> policies = resourcePolicyService.find(c, e,
-                    groupService.allMemberGroups(c, e),
-                    Constants.ADMIN, Constants.COMMUNITY);
-
-            if (CollectionUtils.isNotEmpty(policies)) {
-                return true;
-            }
-        }
-
-        return false;
-    }
-
-    public boolean isCollectionAdmin(Context c) throws SQLException {
-        EPerson e = c.getCurrentUser();
-        return isCollectionAdmin(c, e);
-    }
-
-    @Override
-    public boolean isCollectionAdmin(Context c, EPerson e) throws SQLException {
-        if (e != null) {
-            List<ResourcePolicy> policies = resourcePolicyService.find(c, e,
-                    groupService.allMemberGroups(c, e),
-                    Constants.ADMIN, Constants.COLLECTION);
-
-            if (CollectionUtils.isNotEmpty(policies) || isCommunityAdmin(c, e)) {
-                return true;
-            }
-        }
-
-        return false;
-    }
-
     ///////////////////////////////////////////////
     // policy manipulation methods
     ///////////////////////////////////////////////
@@ -787,4 +762,191 @@ public class AuthorizeServiceImpl implements AuthorizeService {
         return resourcePolicyService.findExceptRpType(c, o, actionID, rpType);
     }
 
+    /**
+     * Checks that the context's current user is a community admin in the site by querying the solr database.
+     *
+     * @param context   context with the current user
+     * @return          true if the current user is a community admin in the site
+     *                  false when this is not the case, or an exception occurred
+     */
+    @Override
+    public boolean isCommunityAdmin(Context context) throws SQLException {
+        return performCheck(context, "search.resourcetype:" + IndexableCommunity.TYPE);
+    }
+
+    /**
+     * Checks that the context's current user is a collection admin in the site by querying the solr database.
+     *
+     * @param context   context with the current user
+     * @return          true if the current user is a collection admin in the site
+     *                  false when this is not the case, or an exception occurred
+     */
+    @Override
+    public boolean isCollectionAdmin(Context context) throws SQLException {
+        return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE);
+    }
+
+    /**
+     * Checks that the context's current user is a community or collection admin in the site.
+     *
+     * @param context   context with the current user
+     * @return          true if the current user is a community or collection admin in the site
+     *                  false when this is not the case, or an exception occurred
+     */
+    @Override
+    public boolean isComColAdmin(Context context) throws SQLException {
+        return performCheck(context,
+            "(search.resourcetype:" + IndexableCommunity.TYPE + " OR search.resourcetype:" +
+                IndexableCollection.TYPE + ")");
+    }
+
+    /**
+     * Finds communities for which the logged in user has ADMIN rights.
+     *
+     * @param context   the context whose user is checked against
+     * @param query     the optional extra query
+     * @param offset    the offset for pagination
+     * @param limit     the amount of dso's to return
+     * @return          a list of communities for which the logged in user has ADMIN rights.
+     * @throws SearchServiceException
+     */
+    @Override
+    public List<Community> findAdminAuthorizedCommunity(Context context, String query, int offset, int limit)
+        throws SearchServiceException, SQLException {
+        List<Community> communities = new ArrayList<>();
+        query = formatCustomQuery(query);
+        DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
+                IndexableCommunity.TYPE,
+            offset, limit);
+        for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
+            Community community = ((IndexableCommunity) solrCollections).getIndexedObject();
+            communities.add(community);
+        }
+        return communities;
+    }
+
+    /**
+     * Finds the amount of communities for which the logged in user has ADMIN rights.
+     *
+     * @param context   the context whose user is checked against
+     * @param query     the optional extra query
+     * @return          the number of communities for which the logged in user has ADMIN rights.
+     * @throws SearchServiceException
+     */
+    @Override
+    public long countAdminAuthorizedCommunity(Context context, String query)
+        throws SearchServiceException, SQLException {
+        query = formatCustomQuery(query);
+        DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
+                IndexableCommunity.TYPE,
+            null, null);
+        return discoverResult.getTotalSearchResults();
+    }
+
+    /**
+     * Finds collections for which the logged in user has ADMIN rights.
+     *
+     * @param context   the context whose user is checked against
+     * @param query     the optional extra query
+     * @param offset    the offset for pagination
+     * @param limit     the amount of dso's to return
+     * @return          a list of collections for which the logged in user has ADMIN rights.
+     * @throws SearchServiceException
+     */
+    @Override
+    public List<Collection> findAdminAuthorizedCollection(Context context, String query, int offset, int limit)
+        throws SearchServiceException, SQLException {
+        List<Collection> collections = new ArrayList<>();
+        if (context.getCurrentUser() == null) {
+            return collections;
+        }
+
+        query = formatCustomQuery(query);
+        DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
+                IndexableCollection.TYPE,
+            offset, limit);
+        for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
+            Collection collection = ((IndexableCollection) solrCollections).getIndexedObject();
+            collections.add(collection);
+        }
+        return collections;
+    }
+
+    /**
+     * Finds the amount of collections for which the logged in user has ADMIN rights.
+     *
+     * @param context   the context whose user is checked against
+     * @param query     the optional extra query
+     * @return          the number of collections for which the logged in user has ADMIN rights.
+     * @throws SearchServiceException
+     */
+    @Override
+    public long countAdminAuthorizedCollection(Context context, String query)
+        throws SearchServiceException, SQLException {
+        query = formatCustomQuery(query);
+        DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
+                IndexableCollection.TYPE,
+            null, null);
+        return discoverResult.getTotalSearchResults();
+    }
+
+    private boolean performCheck(Context context, String query) throws SQLException {
+        if (context.getCurrentUser() == null) {
+            return false;
+        }
+
+        try {
+            DiscoverResult discoverResult = getDiscoverResult(context, query, null, null);
+            if (discoverResult.getTotalSearchResults() > 0) {
+                return true;
+            }
+        } catch (SearchServiceException e) {
+            log.error("Failed getting community/collection admin status for "
+                + context.getCurrentUser().getEmail() + " The search error is: " + e.getMessage()
+                + " The search resourceType filter was: " + query);
+        }
+        return false;
+    }
+
+    private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit)
+        throws SearchServiceException, SQLException {
+        String groupQuery = getGroupToQuery(groupService.allMemberGroups(context, context.getCurrentUser()));
+
+        DiscoverQuery discoverQuery = new DiscoverQuery();
+        if (!this.isAdmin(context)) {
+            query = query + " AND (" +
+                "admin:e" + context.getCurrentUser().getID() + groupQuery + ")";
+        }
+        discoverQuery.setQuery(query);
+        if (offset != null) {
+            discoverQuery.setStart(offset);
+        }
+        if (limit != null) {
+            discoverQuery.setMaxResults(limit);
+        }
+
+        return searchService.search(context, discoverQuery);
+    }
+
+    private String getGroupToQuery(List<Group> groups) {
+        StringBuilder groupQuery = new StringBuilder();
+
+        if (groups != null) {
+            for (Group group : groups) {
+                groupQuery.append(" OR admin:g");
+                groupQuery.append(group.getID());
+            }
+        }
+
+        return groupQuery.toString();
+    }
+
+    private String formatCustomQuery(String query) {
+        if (StringUtils.isBlank(query)) {
+            return "";
+        } else {
+            return query + " AND ";
+        }
+    }
 }
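For a user who is not a site administrator, getDiscoverResult() above AND-s the resource-type filter with the user's eperson and group "admin" clauses before handing the query to the SearchService. A hypothetical rendering of the query built by isCommunityAdmin() (the UUIDs are made up, and the literal value of IndexableCommunity.TYPE is assumed to be "Community"):

    // Hypothetical Solr query assembled for isCommunityAdmin() for a non-site-admin user:
    //   search.resourcetype:Community AND (admin:e3f1d9a2-0c8e-4f0b-9f1d-2b7c6a5e4d3c
    //       OR admin:g1a2b3c4-d5e6-7f80-91a2-b3c4d5e6f708)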
@@ -14,8 +14,10 @@ import java.util.List;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.authorize.ResourcePolicy;
 import org.dspace.content.Collection;
+import org.dspace.content.Community;
 import org.dspace.content.DSpaceObject;
 import org.dspace.core.Context;
+import org.dspace.discovery.SearchServiceException;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
@@ -209,30 +211,6 @@ public interface AuthorizeService {
      */
     public boolean isAdmin(Context c, EPerson e) throws SQLException;
 
-    public boolean isCommunityAdmin(Context c) throws SQLException;
-
-    public boolean isCollectionAdmin(Context c) throws SQLException;
-
-    /**
-     * Check to see if a specific user is Community admin
-     *
-     * @param c current context
-     * @param e the user to check
-     * @return true if user is an admin of some community
-     * @throws SQLException
-     */
-    public boolean isCommunityAdmin(Context c, EPerson e) throws SQLException;
-
-    /**
-     * Check to see if a specific user is Collection admin
-     *
-     * @param c current context
-     * @param e the user to check
-     * @return true if user is an admin of some collection
-     * @throws SQLException if database error
-     */
-    public boolean isCollectionAdmin(Context c, EPerson e) throws SQLException;
-
     ///////////////////////////////////////////////
     // policy manipulation methods
     ///////////////////////////////////////////////
@@ -536,4 +514,82 @@ public interface AuthorizeService {
     void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
         throws SQLException, AuthorizeException;
 
+    /**
+     * Checks that the context's current user is a community admin in the site by querying the solr database.
+     *
+     * @param context   context with the current user
+     * @return          true if the current user is a community admin in the site
+     *                  false when this is not the case, or an exception occurred
+     */
+    boolean isCommunityAdmin(Context context) throws SQLException;
+
+    /**
+     * Checks that the context's current user is a collection admin in the site by querying the solr database.
+     *
+     * @param context   context with the current user
+     * @return          true if the current user is a collection admin in the site
+     *                  false when this is not the case, or an exception occurred
+     */
+    boolean isCollectionAdmin(Context context) throws SQLException;
+
+    /**
+     * Checks that the context's current user is a community or collection admin in the site.
+     *
+     * @param context   context with the current user
+     * @return          true if the current user is a community or collection admin in the site
+     *                  false when this is not the case, or an exception occurred
+     */
+    boolean isComColAdmin(Context context) throws SQLException;
+
+    /**
+     * Finds communities for which the current user is admin, AND which match the query.
+     *
+     * @param context   context with the current user
+     * @param query     the query for which to filter the results more
+     * @param offset    used for pagination of the results
+     * @param limit     used for pagination of the results
+     * @return          the matching communities
+     * @throws SearchServiceException
+     * @throws SQLException
+     */
+    List<Community> findAdminAuthorizedCommunity(Context context, String query, int offset, int limit)
+        throws SearchServiceException, SQLException;
+
+    /**
+     * Counts communities for which the current user is admin, AND which match the query.
+     *
+     * @param context   context with the current user
+     * @param query     the query for which to filter the results more
+     * @return          the number of matching communities
+     * @throws SearchServiceException
+     * @throws SQLException
+     */
+    long countAdminAuthorizedCommunity(Context context, String query)
+        throws SearchServiceException, SQLException;
+
+    /**
+     * Finds collections for which the current user is admin, AND which match the query.
+     *
+     * @param context   context with the current user
+     * @param query     the query for which to filter the results more
+     * @param offset    used for pagination of the results
+     * @param limit     used for pagination of the results
+     * @return          the matching collections
+     * @throws SearchServiceException
+     * @throws SQLException
+     */
+    List<Collection> findAdminAuthorizedCollection(Context context, String query, int offset, int limit)
+        throws SearchServiceException, SQLException;
+
+    /**
+     * Counts collections for which the current user is admin, AND which match the query.
+     *
+     * @param context   context with the current user
+     * @param query     the query for which to filter the results more
+     * @return          the number of matching collections
+     * @throws SearchServiceException
+     * @throws SQLException
+     */
+    long countAdminAuthorizedCollection(Context context, String query)
+        throws SearchServiceException, SQLException;
 }
@@ -305,6 +305,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
             // metadataValueService.update(context, metadataValue);
             dso.addDetails(metadataField.toString());
         }
+        setMetadataModified(dso);
         return newMetadata;
     }
 
@@ -51,7 +51,7 @@ public class EntityServiceImpl implements EntityService {
     @Override
     public EntityType getType(Context context, Entity entity) throws SQLException {
         Item item = entity.getItem();
-        List<MetadataValue> list = itemService.getMetadata(item, "relationship", "type", null, Item.ANY, false);
+        List<MetadataValue> list = itemService.getMetadata(item, "dspace", "entity", "type", Item.ANY, false);
         if (!list.isEmpty()) {
             return entityTypeService.findByEntityType(context, list.get(0).getValue());
         } else {
@@ -113,6 +113,16 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
     @Transient
     private transient ItemService itemService;
 
+    /**
+     * True if anything else was changed since last metadata retrieval()
+     * (to drive metadata cache)
+     */
+    @Transient
+    private boolean modifiedMetadataCache = true;
+
+    @Transient
+    private List<MetadataValue> cachedMetadata = new ArrayList<>();
+
     /**
      * Protected constructor, create object using:
      * {@link org.dspace.content.service.ItemService#create(Context, WorkspaceItem)}
@@ -374,4 +384,23 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
         }
         return itemService;
     }
+
+    @Override
+    protected void setMetadataModified() {
+        super.setMetadataModified();
+        modifiedMetadataCache = true;
+    }
+
+    public boolean isModifiedMetadataCache() {
+        return modifiedMetadataCache;
+    }
+
+    protected List<MetadataValue> getCachedMetadata() {
+        return cachedMetadata;
+    }
+
+    protected void setCachedMetadata(List<MetadataValue> cachedMetadata) {
+        this.cachedMetadata = cachedMetadata;
+        modifiedMetadataCache = false;
+    }
 }
@@ -1328,42 +1328,33 @@ prevent the generation of resource policy entry values with null dspace_object a
     @Override
     public List<MetadataValue> getMetadata(Item item, String schema, String element, String qualifier, String lang,
                                            boolean enableVirtualMetadata) {
-        //Fields of the relation schema are virtual metadata
-        //except for relation.type which is the type of item in the model
-        if (StringUtils.equals(schema, MetadataSchemaEnum.RELATION.getName()) && !StringUtils.equals(element, "type")) {
-
-            List<RelationshipMetadataValue> relationMetadata = relationshipMetadataService
-                .getRelationshipMetadata(item, enableVirtualMetadata);
-            List<MetadataValue> listToReturn = new LinkedList<>();
-            for (MetadataValue metadataValue : relationMetadata) {
-                if (StringUtils.equals(metadataValue.getMetadataField().getElement(), element)) {
-                    listToReturn.add(metadataValue);
-                }
-            }
-            listToReturn = sortMetadataValueList(listToReturn);
-
-            return listToReturn;
-
-        } else {
-            List<MetadataValue> dbMetadataValues = super.getMetadata(item, schema, element, qualifier, lang);
-
-            List<MetadataValue> fullMetadataValueList = new LinkedList<>();
-            if (enableVirtualMetadata) {
-                fullMetadataValueList.addAll(relationshipMetadataService.getRelationshipMetadata(item, true));
-            }
-            fullMetadataValueList.addAll(dbMetadataValues);
-
-            List<MetadataValue> finalList = new LinkedList<>();
-            for (MetadataValue metadataValue : fullMetadataValueList) {
-                if (match(schema, element, qualifier, lang, metadataValue)) {
-                    finalList.add(metadataValue);
-                }
-            }
-
-            finalList = sortMetadataValueList(finalList);
-            return finalList;
-        }
+        if (!enableVirtualMetadata) {
+            log.debug("Called getMetadata for " + item.getID() + " without enableVirtualMetadata");
+            return super.getMetadata(item, schema, element, qualifier, lang);
+        }
+        if (item.isModifiedMetadataCache()) {
+            log.debug("Called getMetadata for " + item.getID() + " with invalid cache");
+            //rebuild cache
+            List<MetadataValue> dbMetadataValues = item.getMetadata();
+
+            List<MetadataValue> fullMetadataValueList = new LinkedList<>();
+            fullMetadataValueList.addAll(relationshipMetadataService.getRelationshipMetadata(item, true));
+            fullMetadataValueList.addAll(dbMetadataValues);
+
+            item.setCachedMetadata(sortMetadataValueList(fullMetadataValueList));
+        }
+
+        log.debug("Called getMetadata for " + item.getID() + " based on cache");
+        // Build up list of matching values based on the cache
+        List<MetadataValue> values = new ArrayList<>();
+        for (MetadataValue dcv : item.getCachedMetadata()) {
+            if (match(schema, element, qualifier, lang, dcv)) {
+                values.add(dcv);
+            }
+        }
+
+        // Create an array of matching values
+        return values;
     }
 
     /**
@@ -164,6 +164,7 @@ public class Relationship implements ReloadableEntity<Integer> {
      */
     public void setLeftPlace(int leftPlace) {
         this.leftPlace = leftPlace;
+        leftItem.setMetadataModified();
     }
 
     /**
@@ -180,6 +181,7 @@ public class Relationship implements ReloadableEntity<Integer> {
      */
     public void setRightPlace(int rightPlace) {
         this.rightPlace = rightPlace;
+        rightItem.setMetadataModified();
     }
 
     /**
@@ -63,11 +63,9 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
     public String getEntityTypeStringFromMetadata(Item item) {
         List<MetadataValue> list = item.getMetadata();
         for (MetadataValue mdv : list) {
-            if (StringUtils.equals(mdv.getMetadataField().getMetadataSchema().getName(),
-                "relationship")
-                && StringUtils.equals(mdv.getMetadataField().getElement(),
-                "type")) {
-
+            if (StringUtils.equals(mdv.getMetadataField().getMetadataSchema().getName(), "dspace")
+                && StringUtils.equals(mdv.getMetadataField().getElement(), "entity")
+                && StringUtils.equals(mdv.getMetadataField().getQualifier(), "type")) {
                 return mdv.getValue();
             }
         }
@@ -257,8 +257,8 @@ public class RelationshipServiceImpl implements RelationshipService {
     }
 
     private boolean verifyEntityTypes(Item itemToProcess, EntityType entityTypeToProcess) {
-        List<MetadataValue> list = itemService.getMetadata(itemToProcess, "relationship", "type",
-                                                           null, Item.ANY, false);
+        List<MetadataValue> list = itemService.getMetadata(itemToProcess, "dspace", "entity",
+                                                           "type", Item.ANY, false);
         if (list.isEmpty()) {
             return false;
         }
@@ -156,22 +156,6 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
         return ma.getMatches(query, start, limit, locale);
     }
 
-
-    @Override
-    public Choices getMatches(String fieldKey, String query, Collection collection, int start, int limit, String locale,
-                              boolean externalInput) {
-        ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection);
-        if (ma == null) {
-            throw new IllegalArgumentException(
-                "No choices plugin was configured for field \"" + fieldKey
-                    + "\", collection=" + collection.getID().toString() + ".");
-        }
-        if (externalInput && ma instanceof SolrAuthority) {
-            ((SolrAuthority) ma).addExternalResultsInNextMatches();
-        }
-        return ma.getMatches(query, start, limit, locale);
-    }
-
     @Override
     public Choices getBestMatch(String fieldKey, String query, Collection collection,
                                 String locale) {
@@ -13,6 +13,7 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpressionException;
@@ -315,6 +316,9 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     private String getNodeLabel(String key, boolean useHierarchy) {
         try {
             Node node = getNode(key);
+            if (Objects.isNull(node)) {
+                return null;
+            }
             if (useHierarchy) {
                 return this.buildString(node);
             } else {
@@ -222,4 +222,12 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService {
         }
         return copy;
     }
+
+    @Override
+    public void clearCache() {
+        controlled.clear();
+        minConfidence.clear();
+
+        isAuthorityRequired = null;
+    }
 }
@@ -0,0 +1,87 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.authority;

import java.util.List;
import java.util.stream.Collectors;

import org.apache.commons.collections4.CollectionUtils;
import org.dspace.app.sherpa.SHERPAService;
import org.dspace.app.sherpa.v2.SHERPAResponse;
import org.dspace.utils.DSpace;

/**
 * Journal-name authority based on SHERPA/RoMEO v2
 *
 * @author Larry Stone
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 * @version $Revision $
 * @see SHERPARoMEOProtocol
 */
public class SHERPARoMEOJournalTitle implements ChoiceAuthority {
    private String pluginInstanceName;

    public SHERPARoMEOJournalTitle() {
        super();
    }

    @Override
    public Choices getMatches(String text, int start, int limit, String locale) {
        // punt if there is no query text
        if (text == null || text.trim().length() == 0) {
            return new Choices(true);
        }
        SHERPAService sherpaService = new DSpace().getSingletonService(SHERPAService.class);
        SHERPAResponse sherpaResponse = sherpaService.performRequest("publication", "title",
            "contains word", text, 0, 0);
        Choices result;
        if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
            List<Choice> list = sherpaResponse
                .getJournals().stream()
                .skip(start)
                .limit(limit)
                .map(sherpaJournal -> new Choice(sherpaJournal.getIssns().get(0),
                    sherpaJournal.getTitles().get(0), sherpaJournal.getTitles().get(0)))
                .collect(Collectors.toList());
            int total = sherpaResponse.getJournals().size();
            result = new Choices(list.toArray(new Choice[list.size()]), start, total, Choices.CF_ACCEPTED,
                total > (start + limit));
        } else {
            result = new Choices(false);
        }
        return result;
    }

    @Override
    public Choices getBestMatch(String text, String locale) {
        return getMatches(text, 0, 1, locale);
    }

    @Override
    public String getLabel(String key, String locale) {
        SHERPAService sherpaService = new DSpace().getSingletonService(SHERPAService.class);
        SHERPAResponse sherpaResponse = sherpaService.performRequest("publication", "issn",
            "equals", key, 0, 1);
        if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
            return sherpaResponse.getJournals().get(0).getTitles().get(0);
        } else {
            return null;
        }
    }

    @Override
    public void setPluginInstanceName(String name) {
        this.pluginInstanceName = name;
    }

    @Override
    public String getPluginInstanceName() {
        return pluginInstanceName;
    }
}
@@ -0,0 +1,87 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.authority;

import java.util.List;
import java.util.stream.Collectors;

import org.apache.commons.collections4.CollectionUtils;
import org.dspace.app.sherpa.SHERPAService;
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
import org.dspace.utils.DSpace;

/**
 * Publisher name authority based on SHERPA/RoMEO v2
 *
 * @author Larry Stone
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 * @version $Revision $
 * @see SHERPARoMEOProtocol
 */
public class SHERPARoMEOPublisher implements ChoiceAuthority {
    private String pluginInstanceName;

    public SHERPARoMEOPublisher() {
        super();
    }

    @Override
    public Choices getMatches(String text, int start, int limit, String locale) {
        // punt if there is no query text
        if (text == null || text.trim().length() == 0) {
            return new Choices(true);
        }
        SHERPAService sherpaService = new DSpace().getSingletonService(SHERPAService.class);
        SHERPAPublisherResponse sherpaResponse = sherpaService.performPublisherRequest("publisher", "name",
            "contains word", text, 0, 0);
        Choices result;
        if (CollectionUtils.isNotEmpty(sherpaResponse.getPublishers())) {
            List<Choice> list = sherpaResponse
                .getPublishers().stream()
                .skip(start)
                .limit(limit)
                .map(sherpaPublisher ->
                    new Choice(sherpaPublisher.getIdentifier(),
                        sherpaPublisher.getName(), sherpaPublisher.getName()))
                .collect(Collectors.toList());
            int total = sherpaResponse.getPublishers().size();
            result = new Choices(list.toArray(new Choice[list.size()]), start, total, Choices.CF_ACCEPTED,
                total > (start + limit));
        } else {
            result = new Choices(false);
        }
        return result;
    }

    @Override
    public Choices getBestMatch(String text, String locale) {
        return getMatches(text, 0, 1, locale);
    }

    @Override
    public String getLabel(String key, String locale) {
        SHERPAService sherpaService = new DSpace().getSingletonService(SHERPAService.class);
        SHERPAPublisherResponse sherpaResponse = sherpaService.performPublisherRequest("publisher", "id",
            "equals", key, 0, 1);
        if (CollectionUtils.isNotEmpty(sherpaResponse.getPublishers())) {
            return sherpaResponse.getPublishers().get(0).getName();
        } else {
            return null;
        }
    }

    @Override
    public void setPluginInstanceName(String name) {
        this.pluginInstanceName = name;
    }

    @Override
    public String getPluginInstanceName() {
        return pluginInstanceName;
    }
}
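The two SHERPA/RoMEO authorities above can be exercised directly; a minimal, hypothetical sketch (the query strings and the ISSN are placeholders, a null locale is used for brevity, and in practice the plugins are resolved through the ChoiceAuthorityService plugin mechanism rather than instantiated by hand):

    // Illustrative only; uses just the ChoiceAuthority methods implemented above.
    ChoiceAuthority journals = new SHERPARoMEOJournalTitle();
    Choices journalHits = journals.getMatches("Nature", 0, 10, null); // ISSN as key, title as value/label
    String title = journals.getLabel("0028-0836", null);              // resolves a (placeholder) ISSN to its title

    ChoiceAuthority publishers = new SHERPARoMEOPublisher();
    Choices publisherHits = publishers.getMatches("university press", 0, 10, null); // publisher id as key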
@@ -53,8 +53,6 @@ public class SolrAuthority implements ChoiceAuthority {
 
     private static final Logger log = LogManager.getLogger(SolrAuthority.class);
 
-    protected boolean externalResults = false;
-
     protected final AuthorityValueService authorityValueService
         = AuthorityServiceFactory.getInstance().getAuthorityValueService();
 
@@ -95,9 +93,6 @@ public class SolrAuthority implements ChoiceAuthority {
         queryArgs.set(CommonParams.START, start);
         //We add one to our facet limit so that we know if there are more matches
         int maxNumberOfSolrResults = limit + 1;
-        if (externalResults) {
-            maxNumberOfSolrResults = configurationService.getIntProperty("xmlui.lookup.select.size", 12);
-        }
         queryArgs.set(CommonParams.ROWS, maxNumberOfSolrResults);
 
         String sortField = "value";
@@ -135,14 +130,16 @@ public class SolrAuthority implements ChoiceAuthority {
                 }
             }
 
-            if (externalResults && StringUtils.isNotBlank(text)) {
+            if (StringUtils.isNotBlank(text)) {
                 int sizeFromSolr = alreadyPresent.size();
-                int maxExternalResults = limit <= 10 ? Math.max(limit - sizeFromSolr, 2) : Math
-                    .max(limit - 10 - sizeFromSolr, 2) + limit - 10;
+                int maxExternalResults = sizeFromSolr < limit ? limit + 1 : sizeFromSolr + 1;
+                // force an upper limit for external results
+                if (maxExternalResults > 10) {
+                    maxExternalResults = 10;
+                }
                 addExternalResults(text, choices, alreadyPresent, maxExternalResults);
             }
 
 
             // hasMore = (authDocs.size() == (limit + 1));
             hasMore = true;
         }
@@ -171,8 +168,9 @@ public class SolrAuthority implements ChoiceAuthority {
                                       int max) {
         if (source != null) {
             try {
+                // max has been already adapted to consider the need to filter already found entries
                 List<AuthorityValue> values = source
-                    .queryAuthorities(text, max * 2); // max*2 because results get filtered
+                    .queryAuthorities(text, max);
 
                 // filtering loop
                 Iterator<AuthorityValue> iterator = values.iterator();
@@ -196,7 +194,6 @@ public class SolrAuthority implements ChoiceAuthority {
             } catch (Exception e) {
                 log.error("Error", e);
             }
-            this.externalResults = false;
         } else {
             log.warn("external source for authority not configured");
         }
@@ -288,10 +285,6 @@ public class SolrAuthority implements ChoiceAuthority {
         return manager.getServiceByName(AuthoritySearchService.class.getName(), AuthoritySearchService.class);
     }
 
-    public void addExternalResultsInNextMatches() {
-        this.externalResults = true;
-    }
-
     @Override
     public void setPluginInstanceName(String name) {
         authorityName = name;
@@ -89,9 +89,6 @@ public interface ChoiceAuthorityService {
     public Choices getMatches(String fieldKey, String query, Collection collection,
                               int start, int limit, String locale);
 
-    public Choices getMatches(String fieldKey, String query, Collection collection, int start, int limit, String locale,
-                              boolean externalInput);
-
     /**
      * Wrapper that calls getBestMatch method of the plugin corresponding to
      * the metadata field defined by single field key.
@@ -112,4 +112,9 @@ public interface MetadataAuthorityService {
      * @return the list of metadata field with authority control
      */
     public List<String> getAuthorityMetadata();
+
+    /**
+     * This method has been created to have a way of clearing the cache kept inside the service
+     */
+    public void clearCache();
 }
@@ -8,8 +8,10 @@
 package org.dspace.content.dao.impl;
 
 import java.sql.SQLException;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import javax.persistence.Query;
 import javax.persistence.criteria.CriteriaBuilder;
 import javax.persistence.criteria.CriteriaQuery;
@@ -17,6 +19,7 @@ import javax.persistence.criteria.Join;
 import javax.persistence.criteria.Root;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.Logger;
 import org.dspace.content.MetadataField;
 import org.dspace.content.MetadataField_;
 import org.dspace.content.MetadataSchema;
@@ -24,6 +27,7 @@ import org.dspace.content.MetadataSchema_;
 import org.dspace.content.dao.MetadataFieldDAO;
 import org.dspace.core.AbstractHibernateDAO;
 import org.dspace.core.Context;
+import org.hibernate.Session;
 
 /**
  * Hibernate implementation of the Database Access Object interface class for the MetadataField object.
@@ -33,6 +37,17 @@ import org.dspace.core.Context;
  * @author kevinvandevelde at atmire.com
  */
 public class MetadataFieldDAOImpl extends AbstractHibernateDAO<MetadataField> implements MetadataFieldDAO {
+    /**
+     * log4j logger
+     */
+    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataFieldDAOImpl.class);
+
+    /**
+     * Cache to improve the performance of searching metadata fields.
+     * This cache only stores IDs, the actual MetadataField is retrieved from hibernate.
+     */
+    private static Map<String, Integer> cachedFields = new HashMap();
+
     protected MetadataFieldDAOImpl() {
         super();
     }
@@ -79,6 +94,30 @@ public class MetadataFieldDAOImpl extends AbstractHibernateDAO<MetadataField> im
     @Override
     public MetadataField findByElement(Context context, String metadataSchema, String element, String qualifier)
         throws SQLException {
+        String key = metadataSchema + "." + element + "." + qualifier;
+        if (cachedFields.containsKey(key)) {
+            Session session = getHibernateSession(context);
+            MetadataField metadataField = null;
+            try {
+                metadataField = session.load(MetadataField.class, cachedFields.get(key));
+            } catch (Throwable e) {
+                log.error("Failed to load metadata field " + key + " using ID " + cachedFields.get(key));
+            }
+            try {
+                if (metadataField != null &&
+                        (metadataField.getMetadataSchema().getName() + "." + metadataField.getElement() +
                            "." + metadataField.getQualifier()).equals(key)) {
+                    return metadataField;
+                } else {
+                    cachedFields.remove(key);
+                }
+            } catch (Throwable e) {
+                log.error("Failed to verify consistency of metadata field " + key +
+                        " using ID " + cachedFields.get(key));
+                cachedFields.clear();
+            }
+        }
+
         Query query;
 
         if (StringUtils.isNotBlank(qualifier)) {
@@ -103,7 +142,11 @@ public class MetadataFieldDAOImpl extends AbstractHibernateDAO<MetadataField> im
         }
         query.setHint("org.hibernate.cacheable", Boolean.TRUE);
 
-        return singleResult(query);
+        MetadataField metadataField = singleResult(query);
+        if (metadataField != null) {
+            cachedFields.put(key, metadataField.getID());
+        }
+        return metadataField;
     }
 
     @Override
@@ -13,10 +13,13 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.rmi.dgc.VMID;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@@ -453,6 +456,40 @@ public final class Utils {
}
}

/**
* Retrieve the IP address(es) of a given URI string.
* <P>
* At this time, DSpace only supports IPv4, so this method will only return IPv4 addresses.
* @param uriString URI string
* @return IP address(es) in a String array (or null if not found)
*/
public static String[] getIPAddresses(String uriString) {
String[] ipAddresses = null;

// First, get the hostname
String hostname = getHostName(uriString);

if (StringUtils.isNotEmpty(hostname)) {
try {
// Then, get the list of all IPs for that hostname
InetAddress[] inetAddresses = InetAddress.getAllByName(hostname);

// Convert array of InetAddress objects to array of IP address Strings
ipAddresses = Arrays.stream(inetAddresses)
// Filter our array to ONLY include IPv4 addresses
.filter((address) -> address instanceof Inet4Address)
// Call getHostAddress() on each to get the IPv4 address as a string
.map((address) -> ((Inet4Address) address).getHostAddress())
.toArray(String[]::new);
} catch (UnknownHostException ex) {
return null;
}
}

return ipAddresses;
}

/**
* Replaces configuration placeholders within a String with the corresponding value
* from DSpace's Configuration Service.
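Reviewer note: a minimal usage sketch for the new Utils.getIPAddresses() helper above; the URL is illustrative only, any resolvable http(s) URI would behave the same way.

    // Resolve the host of a URI and keep only its IPv4 addresses (null if the host is unknown).
    String[] ips = Utils.getIPAddresses("https://demo.dspace.org");   // hypothetical URL
    if (ips != null) {
        for (String ip : ips) {
            System.out.println(ip);
        }
    }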
@@ -157,6 +157,13 @@ public class IndexEventConsumer implements Consumer {
} else {
log.debug("consume() adding event to update queue: " + event.toString());
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));

// If the event subject is a Collection and the event object is an Item,
// also update the object in order to index mapped/unmapped Items
if (subject != null &&
subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) {
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object));
}
}
break;
@@ -11,6 +11,7 @@ import static org.apache.logging.log4j.LogManager.getLogger;

import java.sql.SQLException;

import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.dspace.authorize.service.AuthorizeService;
@@ -37,9 +38,16 @@ public class SolrServicePrivateItemPlugin implements SolrServiceSearchPlugin {
try {
// Prevents access if user has no administrative rights on the community or collection.
// NOTE: the resource restriction plugin adds location filters for community and collection admins.
if ( !authorizeService.isAdmin(context) && !authorizeService.isCommunityAdmin(context)
&& !authorizeService.isCollectionAdmin(context)) {
if (authorizeService.isAdmin(context)) {
return;
}
if (!StringUtils.equalsIgnoreCase(discoveryQuery.getDiscoveryConfigurationName(), "administrativeView")) {
solrQuery.addFilterQuery("NOT(discoverable:false)");
return;
}
if (!authorizeService.isCommunityAdmin(context) && !authorizeService.isCollectionAdmin(context)) {
solrQuery.addFilterQuery("NOT(discoverable:false)");
}
} catch (SQLException ex) {
log.error(LogManager.getHeader(context, "Error looking up authorization rights of current user",
@@ -11,6 +11,7 @@ import java.sql.SQLException;
import java.util.List;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrInputDocument;
@@ -118,6 +119,7 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu
fieldValue = "e" + resourcePolicy.getEPerson().getID();
}
document.addField("read", fieldValue);
document.addField("admin", fieldValue);
}

// remove the policy from the cache to save memory
@@ -159,14 +161,15 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu

resourceQuery.append(")");

if (authorizeService.isCommunityAdmin(context)
|| authorizeService.isCollectionAdmin(context)) {
resourceQuery.append(" OR ");
resourceQuery.append(DSpaceServicesFactory.getInstance()
String locations = DSpaceServicesFactory.getInstance()
.getServiceManager()
.getServiceByName(SearchService.class.getName(),
SearchService.class)
.createLocationQueryForAdministrableItems(context));
.createLocationQueryForAdministrableItems(context);

if (StringUtils.isNotBlank(locations)) {
resourceQuery.append(" OR ");
resourceQuery.append(locations);
}

solrQuery.addFilterQuery(resourceQuery.toString());
@@ -15,6 +15,7 @@ import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.indexobject.factory.CollectionIndexFactory;
import org.dspace.discovery.indexobject.factory.InprogressSubmissionIndexFactory;
import org.dspace.discovery.indexobject.factory.ItemIndexFactory;
@@ -47,7 +48,7 @@ public abstract class InprogressSubmissionIndexFactoryImpl

@Override
public void storeInprogressItemFields(Context context, SolrInputDocument doc,
InProgressSubmission inProgressSubmission) throws SQLException {
InProgressSubmission inProgressSubmission) throws SQLException, IOException {
final Item item = inProgressSubmission.getItem();
doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified()));
EPerson submitter = inProgressSubmission.getSubmitter();
@@ -61,6 +62,9 @@ public abstract class InprogressSubmissionIndexFactoryImpl
// get the location string (for searching by collection & community)
List<String> locations = indexableCollectionService.
getCollectionLocations(context, inProgressSubmission.getCollection());

// Add item metadata
indexableItemService.addDiscoveryFields(doc, context, item, SearchUtils.getAllDiscoveryConfigurations(item));
indexableCollectionService.storeCommunityCollectionLocations(doc, locations);
}
}
@@ -19,8 +19,6 @@ import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.indexobject.factory.WorkflowItemIndexFactory;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
@@ -75,10 +73,6 @@ public class WorkflowItemIndexFactoryImpl
final SolrInputDocument doc = super.buildDocument(context, indexableObject);
final XmlWorkflowItem workflowItem = indexableObject.getIndexedObject();
final Item item = workflowItem.getItem();
// Add the item metadata as configured
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils
.getAllDiscoveryConfigurations(workflowItem);
indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations);

String acvalue = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("discovery.facet.namedtype.workflow.item");
@@ -19,8 +19,6 @@ import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.indexobject.factory.WorkspaceItemIndexFactory;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -71,12 +69,6 @@ public class WorkspaceItemIndexFactoryImpl
acvalue = indexableObject.getTypeText();
}
addNamedResourceTypeIndex(doc, acvalue);
final WorkspaceItem inProgressSubmission = indexableObject.getIndexedObject();

// Add the item metadata as configured
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils
.getAllDiscoveryConfigurations(inProgressSubmission);
indexableItemService.addDiscoveryFields(doc, context, inProgressSubmission.getItem(), discoveryConfigurations);

return doc;
}
@@ -7,6 +7,7 @@
*/
package org.dspace.discovery.indexobject.factory;

import java.io.IOException;
import java.sql.SQLException;

import org.apache.solr.common.SolrInputDocument;
@@ -31,5 +32,5 @@ public interface InprogressSubmissionIndexFactory<T extends IndexableInProgressS
* @throws SQLException If database error
*/
void storeInprogressItemFields(Context context, SolrInputDocument doc, InProgressSubmission inProgressSubmission)
throws SQLException;
throws SQLException, IOException;
}
@@ -90,7 +90,7 @@ public class SHERPAv2PublisherDataProvider implements ExternalDataProvider {
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
// Search SHERPA for publishers with the query term in the title (name)
SHERPAPublisherResponse sherpaResponse = sherpaService.performPublisherRequest(
"publication", "title", "contains word", query, start, limit);
"publisher", "name", "contains word", query, start, limit);

// If at least one publisher was found, convert to a list of ExternalDataObjects and return
if (CollectionUtils.isNotEmpty(sherpaResponse.getPublishers())) {
@@ -10,6 +10,8 @@ package org.dspace.handle;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
@@ -57,6 +59,13 @@ public class HandleServiceImpl implements HandleService {
@Autowired
protected SiteService siteService;

private static final Pattern[] IDENTIFIER_PATTERNS = {
Pattern.compile("^hdl:(.*)$"),
Pattern.compile("^info:hdl/(.*)$"),
Pattern.compile("^https?://hdl\\.handle\\.net/(.*)$"),
Pattern.compile("^https?://.+/handle/(.*)$")
};

/**
* Public Constructor
*/
@@ -376,4 +385,39 @@ public class HandleServiceImpl implements HandleService {
public int countTotal(Context context) throws SQLException {
return handleDAO.countRows(context);
}

@Override
public String parseHandle(String identifier) {
if (identifier == null) {
return null;
}
if (identifier.startsWith(getPrefix() + "/")) {
// prefix is the equivalent of 123456789 in 123456789/???; don't strip
return identifier;
}

String canonicalPrefix = configurationService.getProperty("handle.canonical.prefix");
if (identifier.startsWith(canonicalPrefix + "/")) {
// prefix is the equivalent of https://hdl.handle.net/ in https://hdl.handle.net/123456789/???; strip
return StringUtils.stripStart(identifier, canonicalPrefix);
}

for (Pattern pattern : IDENTIFIER_PATTERNS) {
Matcher matcher = pattern.matcher(identifier);
if (matcher.matches()) {
return matcher.group(1);
}
}

// Check additional prefixes supported in the config file
String[] additionalPrefixes = configurationService.getArrayProperty("handle.additional.prefixes");
for (String additionalPrefix : additionalPrefixes) {
if (identifier.startsWith(additionalPrefix + "/")) {
// prefix is the equivalent of 123456789 in 123456789/???; don't strip
return identifier;
}
}

return null;
}
}
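Reviewer note: a rough sketch of what the new parseHandle() is expected to return, assuming the default handle.prefix of 123456789; the values below are illustrative expectations, not test output.

    handleService.parseHandle("123456789/1");                         // "123456789/1" (local prefix, kept as-is)
    handleService.parseHandle("hdl:123456789/1");                     // "123456789/1"
    handleService.parseHandle("https://hdl.handle.net/123456789/1");  // "123456789/1"
    handleService.parseHandle("not-a-handle");                        // null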
@@ -181,4 +181,15 @@ public interface HandleService {
public void modifyHandleDSpaceObject(Context context, String handle, DSpaceObject newOwner) throws SQLException;

int countTotal(Context context) throws SQLException;

/**
* Format a handle ~
* - hdl:123456789/1 -> 123456789/1
* - info:hdl/123456789/1 -> 123456789/1
* - https://hdl.handle.net/123456789/1 -> 123456789/1
*
* @param identifier
* @return
*/
String parseHandle(String identifier);
}
@@ -60,32 +60,7 @@ public class HandleIdentifierProvider extends IdentifierProvider {

@Override
public boolean supports(String identifier) {
String prefix = handleService.getPrefix();
String canonicalPrefix = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("handle.canonical.prefix");
if (identifier == null) {
return false;
}
// return true if handle has valid starting pattern
if (identifier.startsWith(prefix + "/")
|| identifier.startsWith(canonicalPrefix)
|| identifier.startsWith("hdl:")
|| identifier.startsWith("info:hdl")
|| identifier.matches("^https?://hdl\\.handle\\.net/.*")
|| identifier.matches("^https?://.+/handle/.*")) {
return true;
}

//Check additional prefixes supported in the config file
String[] additionalPrefixes = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty("handle.additional.prefixes");
for (String additionalPrefix : additionalPrefixes) {
if (identifier.startsWith(additionalPrefix + "/")) {
return true;
}
}

return false;
return handleService.parseHandle(identifier) != null;
}

@Override
@@ -161,6 +136,7 @@ public class HandleIdentifierProvider extends IdentifierProvider {
public DSpaceObject resolve(Context context, String identifier, String... attributes) {
// We can do nothing with this, return null
try {
identifier = handleService.parseHandle(identifier);
return handleService.resolveToObject(context, identifier);
} catch (IllegalStateException | SQLException e) {
log.error(LogManager.getHeader(context, "Error while resolving handle to item", "handle: " + identifier),
@@ -78,33 +78,7 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider {

@Override
public boolean supports(String identifier) {
String prefix = handleService.getPrefix();
String canonicalPrefix = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("handle.canonical.prefix");
if (identifier == null) {
return false;
}
// return true if handle has valid starting pattern
if (identifier.startsWith(prefix + "/")
|| identifier.startsWith(canonicalPrefix)
|| identifier.startsWith("hdl:")
|| identifier.startsWith("info:hdl")
|| identifier.matches("^https?://hdl\\.handle\\.net/.*")
|| identifier.matches("^https?://.+/handle/.*")) {
return true;
}

//Check additional prefixes supported in the config file
String[] additionalPrefixes = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty("handle.additional.prefixes");
for (String additionalPrefix : additionalPrefixes) {
if (identifier.startsWith(additionalPrefix + "/")) {
return true;
}
}

// otherwise, assume invalid handle
return false;
return handleService.parseHandle(identifier) != null;
}

@Override
@@ -310,6 +284,7 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider {
public DSpaceObject resolve(Context context, String identifier, String... attributes) {
// We can do nothing with this, return null
try {
identifier = handleService.parseHandle(identifier);
return handleService.resolveToObject(context, identifier);
} catch (IllegalStateException | SQLException e) {
log.error(LogManager.getHeader(context, "Error while resolving handle to item", "handle: " + identifier),
@@ -72,33 +72,7 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident

@Override
public boolean supports(String identifier) {
String prefix = handleService.getPrefix();
String canonicalPrefix = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("handle.canonical.prefix");
if (identifier == null) {
return false;
}
// return true if handle has valid starting pattern
if (identifier.startsWith(prefix + "/")
|| identifier.startsWith(canonicalPrefix)
|| identifier.startsWith("hdl:")
|| identifier.startsWith("info:hdl")
|| identifier.matches("^https?://hdl\\.handle\\.net/.*")
|| identifier.matches("^https?://.+/handle/.*")) {
return true;
}

//Check additional prefixes supported in the config file
String[] additionalPrefixes = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty("handle.additional.prefixes");
for (String additionalPrefix : additionalPrefixes) {
if (identifier.startsWith(additionalPrefix + "/")) {
return true;
}
}

// otherwise, assume invalid handle
return false;
return handleService.parseHandle(identifier) != null;
}

@Override
@@ -15,7 +15,11 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import au.com.bytecode.opencsv.CSVReader;
import com.opencsv.CSVParser;
import com.opencsv.CSVParserBuilder;
import com.opencsv.CSVReader;
import com.opencsv.CSVReaderBuilder;
import com.opencsv.exceptions.CsvException;
import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
@@ -24,7 +28,6 @@ import org.dspace.importer.external.service.components.MetadataSource;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;

/**
* This class is an implementation of {@link MetadataSource} which extends {@link AbstractPlainMetadataSource}
* in order to parse "character separated" files like csv, tsv, etc using the Live Import framework.
@@ -36,7 +39,9 @@ public class CharacterSeparatedImportMetadataSourceServiceImpl extends AbstractP

private char separator = ',';

private char escapeCharacter = '"';
private char quoteCharacter = '"';

private char escapeCharacter = '\\';

private Integer skipLines = 1;

@@ -70,6 +75,26 @@ public class CharacterSeparatedImportMetadataSourceServiceImpl extends AbstractP
this.separator = separator;
}

/**
* Method to inject the escape character, usually ". This must be the ASCII integer
* related to the char.
* In example, 9 for tab, 44 for comma
*
*/
public void setQuoteCharacter(char quoteCharacter) {
this.quoteCharacter = quoteCharacter;
}

/**
* Method to inject the escape character, usually \. This must be the ASCII integer
* related to the char.
* In example, 9 for tab, 44 for comma
*
*/
public void setEscapeCharacter(char escapeCharacter) {
this.escapeCharacter = escapeCharacter;
}

@Override
public String getImportSource() {
return importSource;
@@ -82,15 +107,6 @@ public class CharacterSeparatedImportMetadataSourceServiceImpl extends AbstractP
this.importSource = importSource;
}

/**
* Method to inject the escape character. This must be the ASCII integer
* related to the char.
* In example, 9 for tab, 44 for comma
*
*/
public void setEscapeCharacter(char escapeCharacter) {
this.escapeCharacter = escapeCharacter;
}

/**
* The method process any kind of "character separated" files, like CSV, TSV, and so on.
@@ -110,8 +126,11 @@ public class CharacterSeparatedImportMetadataSourceServiceImpl extends AbstractP
@Override
protected List<PlainMetadataSourceDto> readData(InputStream inputStream) throws FileSourceException {
List<PlainMetadataSourceDto> plainMetadataList = new ArrayList<>();
try (CSVReader csvReader = new CSVReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8),
separator, escapeCharacter);) {
CSVParser parser = new CSVParserBuilder().withSeparator(separator).withQuoteChar(quoteCharacter)
.withEscapeChar(escapeCharacter).build();
try (
InputStreamReader inputReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
CSVReader csvReader = new CSVReaderBuilder(inputReader).withCSVParser(parser).build()) {
// read all row
List<String[]> lines = csvReader.readAll();
int listSize = lines == null ? 0 : lines.size();
@@ -139,7 +158,7 @@ public class CharacterSeparatedImportMetadataSourceServiceImpl extends AbstractP
}
count++;
}
} catch (IOException e) {
} catch (IOException | CsvException e) {
throw new FileSourceException("Error reading file", e);
}
return plainMetadataList;
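Reviewer note: a small sketch of the quote/escape distinction the new openCSV parser settings rely on; the input line is illustrative and parseLine() declares IOException, so call it from a method that can throw it.

    CSVParser parser = new CSVParserBuilder()
            .withSeparator(',')
            .withQuoteChar('"')
            .withEscapeChar('\\')
            .build();
    // The quote character groups a field (keeping its embedded comma);
    // the escape character protects single characters inside a field.
    String[] fields = parser.parseLine("Author 1,\"Author 2, something\",Author 3");
    // fields -> ["Author 1", "Author 2, something", "Author 3"]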
@@ -8,17 +8,22 @@
package org.dspace.importer.external.metadatamapping.contributor;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

import au.com.bytecode.opencsv.CSVReader;
import com.opencsv.CSVParser;
import com.opencsv.CSVParserBuilder;
import com.opencsv.CSVReaderBuilder;
import com.opencsv.exceptions.CsvException;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;

/**
* This class implements functionalities to handle common situation regarding plain metadata.
* In some scenario, like csv or tsv, the format don't allow lists.
@@ -33,7 +38,9 @@ public class EnhancedSimpleMetadataContributor extends SimpleMetadataContributor

private char delimiter = ',';

private char escape = '"';
private char quote = '"';

private char escape = '\\';

/**
* This method could be used to set the delimiter used during parse
@@ -51,11 +58,24 @@ public class EnhancedSimpleMetadataContributor extends SimpleMetadataContributor
}

/**
* Method to inject the escape character.
* This must be the ASCII integer
* related to the char.
* In example, 9 for tab, 44 for comma
* If no escape is set, double quote will be used
* This method could be used to get the quote char used in this class
*/
public char getQuote() {
return quote;
}

/**
* This method could be used to set the quote char used during parse
* If no quote char is set, " will be used
*/
public void setQuote(char quote) {
this.quote = quote;
}

/**
* Method to inject the escape character, usually the ". This must be the ASCII
* integer related to the char.
* In example, 9 for tab, 44 for comma If no escape is set, double quote will be used
*/
public void setEscape(char escape) {
this.escape = escape;
@@ -94,10 +114,12 @@ public class EnhancedSimpleMetadataContributor extends SimpleMetadataContributor
// For example, list of author must be: Author 1, author 2, author 3
// if author name contains comma, is important to escape its in
// this way: Author 1, \"Author 2, something\", Author 3
try (CSVReader csvReader = new CSVReader(new StringReader(value),
delimiter, escape);) {
CSVParser parser = new CSVParserBuilder().withSeparator(delimiter).withQuoteChar(quote).withEscapeChar(escape)
.build();
try ( Reader inputReader = new StringReader(value);
com.opencsv.CSVReader csvReader = new CSVReaderBuilder(inputReader).withCSVParser(parser).build()) {
rows = csvReader.readAll();
} catch (IOException e) {
} catch (IOException | CsvException e) {
//fallback, use the inpu as value
return new String[] { value };
}
@@ -130,15 +130,18 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
} catch (ParseException e) {
// Multiple dateformats can be configured, we don't want to print the entire stacktrace every
// time one of those formats fails.
log.info(
log.debug(
"Failed parsing " + dateString + " using the following format: " + dateFormat + ", check " +
"the configured dataformats in config/spring/api/pubmed-integration.xml");
}
j++;
}

if (dcDate != null) {
values.add(metadataFieldMapping.toDCValue(field, dcDate.toString()));
} else {
log.info(
"Failed parsing " + dateString + ", check " +
"the configured dataformats in config/spring/api/pubmed-integration.xml");
}
}
} catch (Exception e) {
@@ -51,7 +51,8 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat

private String baseAddress;

private WebTarget pubmedWebTarget;
// it is protected so that subclass can mock it for testing
protected WebTarget pubmedWebTarget;

private List<String> supportedExtensions;

@@ -11,6 +11,7 @@ import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Utils;
import org.dspace.service.ClientInfoService;
import org.dspace.services.ConfigurationService;
import org.dspace.statistics.util.IPTable;
@@ -41,8 +42,7 @@ public class ClientInfoServiceImpl implements ClientInfoService {
@Autowired(required = true)
public ClientInfoServiceImpl(ConfigurationService configurationService) {
this.configurationService = configurationService;
this.trustedProxies = parseTrustedProxyRanges(
configurationService.getArrayProperty("proxies.trusted.ipranges"));
this.trustedProxies = parseTrustedProxyRanges();
}

@Override
@@ -62,9 +62,8 @@ public class ClientInfoServiceImpl implements ClientInfoService {
}

} else if (StringUtils.isNotBlank(xForwardedForHeaderValue)) {
log.warn(
"X-Forwarded-For header detected but useProxiesEnabled is not enabled. " +
"If your dspace is behind a proxy set it to true");
log.warn("X-Forwarded-For header sent from client, but useProxies is not enabled. " +
"To trust X-Forwarded-For headers, set useProxies=true.");
}

return ip;
@@ -74,55 +73,120 @@ public class ClientInfoServiceImpl implements ClientInfoService {
public boolean isUseProxiesEnabled() {
if (useProxiesEnabled == null) {
useProxiesEnabled = configurationService.getBooleanProperty("useProxies", true);
log.info("useProxies=" + useProxiesEnabled);
log.info("Proxies (useProxies) enabled? " + useProxiesEnabled);
}

return useProxiesEnabled;
}

private IPTable parseTrustedProxyRanges(String[] proxyProperty) {
if (ArrayUtils.isEmpty(proxyProperty)) {
return null;
} else {
//Load all supplied proxy IP ranges into the IP table
/**
* Parse / Determine trusted proxies based on configuration. "Trusted" proxies are the IP addresses from which we'll
* allow the X-FORWARDED-FOR header. We don't accept that header from any IP address, as the header could be used
* to spoof/fake your IP address.
* <P>
* If "proxies.trusted.include_ui_ip = true" (which is the default), then we lookup the IP address(es) associated
* with ${dspace.ui.url}, and append them to the list of trusted proxies. This is necessary to allow the Angular
* UI server-side rendering (SSR) to send us the X-FORWARDED-FOR header, which it usually uses to specify the
* original client IP address.
* <P>
* If "proxies.trusted.ipranges" configuration is specified, those IP addresses/ranges are also included in the
* list of trusted proxies.
* <P>
* Localhost (127.0.0.1) is ALWAYS included in the list of trusted proxies
*
* @return IPTable of trusted IP addresses/ranges, or null if none could be found.
*/
private IPTable parseTrustedProxyRanges() {
String localhostIP = "127.0.0.1";
IPTable ipTable = new IPTable();

// Get list of trusted proxy IP ranges
String[] trustedIpRanges = configurationService.getArrayProperty("proxies.trusted.ipranges");
// Always append localhost (127.0.0.1) to the list of trusted proxies, if not already included
if (!ArrayUtils.contains(trustedIpRanges, localhostIP)) {
trustedIpRanges = ArrayUtils.add(trustedIpRanges, localhostIP);
}
try {
for (String proxyRange : proxyProperty) {
ipTable.add(proxyRange);
// Load all IPs into our IP Table
for (String ipRange : trustedIpRanges) {
ipTable.add(ipRange);
}
} catch (IPTable.IPFormatException e) {
log.error("Property proxies.trusted.ipranges contains an invalid IP range", e);
ipTable = null;
log.error("Property 'proxies.trusted.ipranges' contains an invalid IP range", e);
}

// Is the UI IP address always trusted (default = true)
boolean uiIsTrustedProxy = configurationService.getBooleanProperty("proxies.trusted.include_ui_ip", true);

// As long as the UI is a trusted proxy, determine IP(s) of ${dspace.ui.url}
if (uiIsTrustedProxy) {
String uiUrl = configurationService.getProperty("dspace.ui.url");
// Get any IP address(es) associated with our UI
String[] uiIpAddresses = Utils.getIPAddresses(uiUrl);

if (ArrayUtils.isNotEmpty(uiIpAddresses)) {
try {
// Load all UI IPs into our IP Table
for (String ipRange : uiIpAddresses) {
ipTable.add(ipRange);
}
} catch (IPTable.IPFormatException e) {
log.error("IP address lookup for dspace.ui.url={} was invalid and could not be added to trusted" +
" proxies", uiUrl, e);
}
}
}

// If our IPTable is not empty, log the trusted proxies and return it
if (!ipTable.isEmpty()) {
log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable.toSet().toString());
return ipTable;
} else {
return null;
}
}

/**
* Whether a request is from a trusted proxy or not. Only returns true if trusted proxies are specified
* and the ipAddress is contained in those proxies. False in all other cases
* @param ipAddress IP address to check for
* @return true if trusted, false otherwise
*/
private boolean isRequestFromTrustedProxy(String ipAddress) {
try {
return trustedProxies == null || trustedProxies.contains(ipAddress);
return trustedProxies != null && trustedProxies.contains(ipAddress);
} catch (IPTable.IPFormatException e) {
log.error("Request contains invalid remote address", e);
return false;
}
}

/**
* Get the first X-FORWARDED-FOR header value which does not match the IP or another proxy IP. This is the most
* likely client IP address when proxies are in use.
* <P>
* NOTE: This method does NOT validate the X-FORWARDED-FOR header value is accurate, so be aware this header
* could contain a spoofed value. Therefore, any code calling this method should verify the source is somehow
* trusted, e.g. by using isRequestFromTrustedProxy() or similar.
* @param remoteIp remote IP address
* @param xForwardedForValue X-FORWARDED-FOR header value passed by that address
* @return likely client IP address from X-FORWARDED-FOR header
*/
private String getXForwardedForIpValue(String remoteIp, String xForwardedForValue) {
String ip = null;

/* This header is a comma delimited list */
String headerValue = StringUtils.trimToEmpty(xForwardedForValue);
for (String xfip : headerValue.split(",")) {
xfip = xfip.trim();
/* proxy itself will sometime populate this header with the same value in
remote address. ordering in spec is vague, we'll just take the last
not equal to the proxy
*/
if (!StringUtils.equals(remoteIp, xfip) && StringUtils.isNotBlank(xfip)
// if we have trusted proxies, we'll assume that they are not the client IP
&& (trustedProxies == null || !isRequestFromTrustedProxy(xfip))) {
ip = xfip.trim();
&& !isRequestFromTrustedProxy(xfip)) {
ip = xfip;
}
}

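Reviewer note: a rough sketch of the behaviour the rewritten trusted-proxy handling aims for; the addresses and the proxies.trusted.ipranges value below are illustrative, and the two-argument getClientIp(remoteIp, xForwardedForHeaderValue) call is assumed from the existing ClientInfoService contract.

    // Assuming proxies.trusted.ipranges = 10.0.0.5 and useProxies = true (illustrative values):
    ClientInfoService clientInfo = new ClientInfoServiceImpl(configurationService);
    // An X-Forwarded-For header arriving through the trusted proxy 10.0.0.5 is honoured...
    String ip1 = clientInfo.getClientIp("10.0.0.5", "203.0.113.7, 10.0.0.5");   // expected "203.0.113.7"
    // ...while the same header from an untrusted address is ignored and the remote IP is kept.
    String ip2 = clientInfo.getClientIp("198.51.100.1", "203.0.113.7");         // expected "198.51.100.1"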
@@ -17,7 +17,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

import au.com.bytecode.opencsv.CSVWriter;
import com.opencsv.CSVWriterBuilder;
import com.opencsv.ICSVWriter;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;

@@ -232,7 +233,7 @@ public class Dataset {

public ByteArrayOutputStream exportAsCSV() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
CSVWriter ecsvp = new CSVWriter(new OutputStreamWriter(baos), ';');
ICSVWriter ecsvp = new CSVWriterBuilder(new OutputStreamWriter(baos)).withSeparator(';').build();
//Generate the item row
List<String> colLabels = getColLabels();
colLabels.add(0, "");
@@ -37,11 +37,11 @@ import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;

import au.com.bytecode.opencsv.CSVReader;
import au.com.bytecode.opencsv.CSVWriter;
import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.maxmind.geoip2.model.CityResponse;
import com.opencsv.CSVReader;
import com.opencsv.CSVWriter;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
@@ -200,6 +200,13 @@ public class IPTable {
return set;
}

/**
* Return whether IPTable is empty (having no entries)
* @return true if empty, false otherwise
*/
public boolean isEmpty() {
return map.isEmpty();
}

/**
* Exception Class to deal with IPFormat errors.
@@ -1,54 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.submit.extraction;

import java.util.List;

import gr.ekt.bte.dataloader.FileDataLoader;
import org.dspace.services.ConfigurationService;

/**
* Configuration bean to associate a BTE FileDataLoader with a specific list of format identified by the file
* extensions. See config/spring/api/metadata-extractor.xml
*
* @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
* @author Andrea Bollini (andrea.bollini at 4science.it)
*/
public class MetadataExtractor {

private List<String> extensions;

private FileDataLoader dataLoader;

private ConfigurationService configurationService;

public List<String> getExtensions() {
return extensions;
}

public void setExtensions(List<String> mime) {
this.extensions = mime;
}

public FileDataLoader getDataLoader() {
return dataLoader;
}

public void setDataLoader(FileDataLoader dataLoader) {
this.dataLoader = dataLoader;
}

public ConfigurationService getConfigurationService() {
return configurationService;
}

public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}

}
@@ -7,55 +7,37 @@
*/
package org.dspace.submit.listener;

import java.util.Map;
import java.util.Set;

import gr.ekt.bte.core.DataLoader;
import org.dspace.services.ConfigurationService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.external.model.ExternalDataObject;

/**
* Configuration bean to map metadata to identifiers (i.e dc.identifier.doi -> doi, dc.identifier.isbn -> isbn) and
* alias to BTE Data Loader. See config/spring/api/step-processing.xml
* The interface to implement to support the ExtractMetadata enrichment step
*
* @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
*/
public class MetadataListener {
public interface MetadataListener {
/**
* Return the list of metadata that should be monitored as change to them could
* allow the service to retrieve an ExternalDataObject to enrich the current
* item
*
* @return the list of metadata to monitor
*/
public Set<String> getMetadataToListen();

/**
* Metadata to identifier map
* Retrieve an ExternalDataObject to enrich the current item using the current
* metadata and the information about which listened metadata are changed
*
* @param context the DSpace Context Object
* @param item the item in its current status
* @param changedMetadata the list of listened metadata that are changed
* @return an ExternalDataObject that can be used to enrich the current item
*/
private Map<String, String> metadata;
public ExternalDataObject getExternalDataObject(Context context, Item item, Set<String> changedMetadata);

private ConfigurationService configurationService;

/**
* Alias to data loader map
*/
private Map<String, DataLoader> dataloadersMap;

public ConfigurationService getConfigurationService() {
return configurationService;
}

public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}

public Map<String, String> getMetadata() {
return metadata;
}

public void setMetadata(Map<String, String> metadata) {
this.metadata = metadata;
}

public Map<String, DataLoader> getDataloadersMap() {
return dataloadersMap;
}

public void setDataloadersMap(Map<String, DataLoader> dataloadersMap) {
this.dataloadersMap = dataloadersMap;
}

}
@@ -0,0 +1,99 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.submit.listener;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.ExternalDataProvider;

/**
* This is the basic implementation for the MetadataListener interface.
*
* It got the a map of metadata and related External Data Provider that can be
* used to retrieve further information using the updated metadata in the item
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
*/
public class SimpleMetadataListener implements MetadataListener {
/**
* A map to link a specific metadata with an ExternalDataProvider
*/
private Map<String, List<ExternalDataProvider>> externalDataProvidersMap;

private ItemService itemService = ContentServiceFactory.getInstance().getItemService();

public Map<String, List<ExternalDataProvider>> getExternalDataProvidersMap() {
return externalDataProvidersMap;
}

public void setExternalDataProvidersMap(Map<String, List<ExternalDataProvider>> externalDataProvidersMap) {
this.externalDataProvidersMap = externalDataProvidersMap;
}

@Override
public Set<String> getMetadataToListen() {
return externalDataProvidersMap.keySet();
}

@Override
public ExternalDataObject getExternalDataObject(Context context, Item item, Set<String> changedMetadata) {
// we loop over the available provider and return the first found object
for (String m : changedMetadata) {
List<ExternalDataProvider> providers = externalDataProvidersMap.get(m);
for (ExternalDataProvider prov : providers) {
String id = generateExternalId(context, prov, item, changedMetadata, m);
if (StringUtils.isNotBlank(id)) {
Optional<ExternalDataObject> result = prov.getExternalDataObject(id);
if (result.isPresent()) {
return result.get();
}
}
}
}
return null;
}

/**
* This is the simpler implementation, it assumes that the value of the metadata
* listened by the DataProvider can be used directly as identifier. Subclass may
* extend it to add support for identifier normalization or combine multiple
* information to build the identifier
*
* @param context the DSpace Context Object
* @param prov the ExternalDataProvider that need to received an Id
* @param item the item
* @param changedMetadata the metadata that are recently changed
* @param m the changed metadata that lead to the selected
* ExternalDataProvider
* @return an Id if any that can be used to query the {@link ExternalDataProvider}
*/
protected String generateExternalId(Context context, ExternalDataProvider prov, Item item,
Set<String> changedMetadata, String m) {
List<MetadataValue> metadataByMetadataString = itemService.getMetadataByMetadataString(item, m);
// only suggest an identifier if there is exactly one value for the metadata. If
// there are more values it is highly probable that a lookup was already
// performed when the first value was added
if (metadataByMetadataString != null && metadataByMetadataString.size() == 1) {
return metadataByMetadataString.get(0).getValue();
}
return null;
}

}
|
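The generateExternalId javadoc above leaves identifier normalization to subclasses. A minimal sketch of that extension point, assuming a DOI-style metadata value (the class name and the prefix-stripping rule are illustrative assumptions, not part of this changeset):

package org.dspace.submit.listener;

import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.external.provider.ExternalDataProvider;

/**
 * Illustrative subclass: strips common DOI URL prefixes before the value
 * is used as the identifier for the ExternalDataProvider lookup.
 */
public class DoiNormalizingMetadataListener extends SimpleMetadataListener {

    @Override
    protected String generateExternalId(Context context, ExternalDataProvider prov, Item item,
                                        Set<String> changedMetadata, String m) {
        // reuse the single-value heuristic from the parent class
        String id = super.generateExternalId(context, prov, item, changedMetadata, m);
        if (StringUtils.isBlank(id)) {
            return null;
        }
        // keep only the bare DOI so the provider receives a clean identifier
        return id.replaceFirst("^https?://(dx\\.)?doi\\.org/", "").trim();
    }
}

The externalDataProvidersMap is presumably populated through the Spring configuration that defines the listener bean, mapping each listened metadata field to the providers to query when that field changes.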
@@ -1,144 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.submit.lookup;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import gr.ekt.bte.core.DataLoadingSpec;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.RecordSet;
import gr.ekt.bte.core.Value;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bte.exceptions.MalformedSourceException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;

/**
 * Load metadata from a CiNii formatted file
 *
 * @author Keiji Suzuki
 */
public class CiNiiFileDataLoader extends FileDataLoader {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CiNiiFileDataLoader.class);

    Map<String, String> fieldMap; // mapping between service fields and local
                                  // intermediate fields

    /**
     * Empty constructor
     */
    public CiNiiFileDataLoader() {
    }

    /**
     * @param filename Name of file to load CiNii data from.
     */
    public CiNiiFileDataLoader(String filename) {
        super(filename);
    }

    /*
     * {@see gr.ekt.bte.core.DataLoader#getRecords()}
     *
     * @throws MalformedSourceException
     */
    @Override
    public RecordSet getRecords() throws MalformedSourceException {

        RecordSet recordSet = new RecordSet();

        try {
            InputStream inputStream = new FileInputStream(new File(filename));

            DocumentBuilderFactory factory = DocumentBuilderFactory
                .newInstance();
            factory.setValidating(false);
            factory.setIgnoringComments(true);
            factory.setIgnoringElementContentWhitespace(true);

            DocumentBuilder db = factory.newDocumentBuilder();
            Document inDoc = db.parse(inputStream);

            Element xmlRoot = inDoc.getDocumentElement();

            // There is no element to represent a record, so we cannot process
            // multiple records at once.
            Record record = CiNiiUtils.convertCiNiiDomToRecord(xmlRoot);
            if (record != null) {
                recordSet.addRecord(convertFields(record));
            }
        } catch (FileNotFoundException e) {
            log.error(e.getMessage(), e);
        } catch (ParserConfigurationException e) {
            log.error(e.getMessage(), e);
        } catch (SAXException e) {
            log.error(e.getMessage(), e);
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }

        return recordSet;
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec)
     */
    @Override
    public RecordSet getRecords(DataLoadingSpec spec)
        throws MalformedSourceException {
        if (spec.getOffset() > 0) {
            return new RecordSet();
        }

        return getRecords();
    }

    public Record convertFields(Record publication) {
        for (String fieldName : fieldMap.keySet()) {
            String md = null;
            if (fieldMap != null) {
                md = this.fieldMap.get(fieldName);
            }

            if (StringUtils.isBlank(md)) {
                continue;
            } else {
                md = md.trim();
            }

            if (publication.isMutable()) {
                List<Value> values = publication.getValues(fieldName);
                publication.makeMutable().removeField(fieldName);
                publication.makeMutable().addField(md, values);
            }
        }

        return publication;
    }

    public void setFieldMap(Map<String, String> fieldMap) {
        this.fieldMap = fieldMap;
    }
}
@@ -1,107 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;

import gr.ekt.bte.core.Record;
import org.apache.http.HttpException;
import org.dspace.core.Context;

/**
 * Load metadata from CiNii RDF API
 *
 * @author Keiji Suzuki
 */
public class CiNiiOnlineDataLoader extends NetworkSubmissionLookupDataLoader {
    protected CiNiiService ciniiService = new CiNiiService();

    protected boolean searchProvider = true;

    /**
     * Application id to use CiNii
     */
    protected String appId = null;

    /**
     * max result number to return
     */
    protected int maxResults = 10;

    public void setCiNiiService(CiNiiService ciniiService) {
        this.ciniiService = ciniiService;
    }

    @Override
    public List<String> getSupportedIdentifiers() {
        return Arrays.asList(new String[] {CINII});
    }

    public void setSearchProvider(boolean searchProvider) {
        this.searchProvider = searchProvider;
    }

    @Override
    public boolean isSearchProvider() {
        return searchProvider;
    }

    @Override
    public List<Record> getByIdentifier(Context context,
            Map<String, Set<String>> keys) throws HttpException, IOException {
        if (appId == null) {
            throw new RuntimeException("No CiNii Application ID is specified!");
        }

        List<Record> results = new ArrayList<Record>();
        if (keys != null) {
            Set<String> ciniiids = keys.get(CINII);
            if (ciniiids != null && ciniiids.size() > 0) {
                for (String ciniiid : ciniiids) {
                    Record record = ciniiService.getByCiNiiID(ciniiid, getAppId());
                    if (record != null) {
                        results.add(convertFields(record));
                    }
                }
            }
        }
        return results;
    }

    @Override
    public List<Record> search(Context context, String title, String author, int year)
        throws HttpException, IOException {
        if (appId == null) {
            throw new RuntimeException("No CiNii Application ID is specified!");
        }

        return ciniiService.searchByTerm(title, author, year,
                getMaxResults(), getAppId());
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public int getMaxResults() {
        return maxResults;
    }

    public void setMaxResults(int maxResults) {
        this.maxResults = maxResults;
    }
}
@@ -1,221 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.net.URLEncoder;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
import javax.xml.parsers.DocumentBuilder;
|
|
||||||
import javax.xml.parsers.DocumentBuilderFactory;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import org.apache.http.HttpException;
|
|
||||||
import org.apache.http.HttpResponse;
|
|
||||||
import org.apache.http.HttpStatus;
|
|
||||||
import org.apache.http.StatusLine;
|
|
||||||
import org.apache.http.client.config.RequestConfig;
|
|
||||||
import org.apache.http.client.methods.HttpGet;
|
|
||||||
import org.apache.http.impl.client.CloseableHttpClient;
|
|
||||||
import org.apache.http.impl.client.HttpClientBuilder;
|
|
||||||
import org.apache.logging.log4j.LogManager;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.app.util.XMLUtils;
|
|
||||||
import org.w3c.dom.Document;
|
|
||||||
import org.w3c.dom.Element;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Keiji Suzuki
|
|
||||||
*/
|
|
||||||
public class CiNiiService {
|
|
||||||
/**
|
|
||||||
* log4j category
|
|
||||||
*/
|
|
||||||
private static final Logger log = LogManager.getLogger(CiNiiService.class);
|
|
||||||
|
|
||||||
protected int timeout = 1000;
|
|
||||||
|
|
||||||
public void setTimeout(int timeout) {
|
|
||||||
this.timeout = timeout;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Record getByCiNiiID(String id, String appId) throws HttpException,
|
|
||||||
IOException {
|
|
||||||
return search(id, appId);
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<Record> searchByTerm(String title, String author, int year,
|
|
||||||
int maxResults, String appId)
|
|
||||||
throws HttpException, IOException {
|
|
||||||
List<Record> records = new ArrayList<>();
|
|
||||||
|
|
||||||
List<String> ids = getCiNiiIDs(title, author, year, maxResults, appId);
|
|
||||||
if (ids != null && ids.size() > 0) {
|
|
||||||
for (String id : ids) {
|
|
||||||
Record record = search(id, appId);
|
|
||||||
if (record != null) {
|
|
||||||
records.add(record);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return records;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get metadata by searching CiNii RDF API with CiNii NAID
|
|
||||||
*
|
|
||||||
* @param id CiNii NAID to search by
|
|
||||||
* @param appId registered application identifier for the API
|
|
||||||
* @return record metadata
|
|
||||||
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
|
|
||||||
* @throws HttpException Represents a XML/HTTP fault and provides access to the HTTP status code.
|
|
||||||
*/
|
|
||||||
protected Record search(String id, String appId)
|
|
||||||
throws IOException, HttpException {
|
|
||||||
HttpGet method = null;
|
|
||||||
try ( CloseableHttpClient client = HttpClientBuilder.create().build(); ) {
|
|
||||||
RequestConfig requestConfig = RequestConfig.custom()
|
|
||||||
.setConnectTimeout(timeout)
|
|
||||||
.build();
|
|
||||||
method = new HttpGet("http://ci.nii.ac.jp/naid/" + id + ".rdf?appid=" + appId);
|
|
||||||
method.setConfig(requestConfig);
|
|
||||||
|
|
||||||
// Execute the method.
|
|
||||||
HttpResponse response = client.execute(method);
|
|
||||||
StatusLine statusLine = response.getStatusLine();
|
|
||||||
int statusCode = statusLine.getStatusCode();
|
|
||||||
|
|
||||||
if (statusCode != HttpStatus.SC_OK) {
|
|
||||||
if (statusCode == HttpStatus.SC_BAD_REQUEST) {
|
|
||||||
throw new RuntimeException("CiNii RDF is not valid");
|
|
||||||
} else {
|
|
||||||
throw new RuntimeException("CiNii RDF Http call failed: "
|
|
||||||
+ statusLine);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
|
||||||
.newInstance();
|
|
||||||
factory.setValidating(false);
|
|
||||||
factory.setIgnoringComments(true);
|
|
||||||
factory.setIgnoringElementContentWhitespace(true);
|
|
||||||
// disallow DTD parsing to ensure no XXE attacks can occur.
|
|
||||||
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
|
|
||||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
|
||||||
|
|
||||||
DocumentBuilder db = factory.newDocumentBuilder();
|
|
||||||
Document inDoc = db.parse(response.getEntity().getContent());
|
|
||||||
|
|
||||||
Element xmlRoot = inDoc.getDocumentElement();
|
|
||||||
|
|
||||||
return CiNiiUtils.convertCiNiiDomToRecord(xmlRoot);
|
|
||||||
} catch (Exception e) {
|
|
||||||
throw new RuntimeException(
|
|
||||||
"CiNii RDF identifier is not valid or not exist");
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
if (method != null) {
|
|
||||||
method.releaseConnection();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get CiNii NAIDs by searching CiNii OpenURL API with title, author and year
|
|
||||||
*
|
|
||||||
* @param title record title
|
|
||||||
* @param author record author
|
|
||||||
* @param year record year
|
|
||||||
* @param maxResults maximum number of results returned
|
|
||||||
* @param appId registered application identifier for the API
|
|
||||||
* @return matching NAIDs
|
|
||||||
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
|
|
||||||
* @throws HttpException Represents a XML/HTTP fault and provides access to the HTTP status code.
|
|
||||||
*/
|
|
||||||
protected List<String> getCiNiiIDs(String title, String author, int year,
|
|
||||||
int maxResults, String appId)
|
|
||||||
throws IOException, HttpException {
|
|
||||||
// Need at least one query term
|
|
||||||
if (title == null && author == null && year == -1) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
HttpGet method = null;
|
|
||||||
List<String> ids = new ArrayList<>();
|
|
||||||
try ( CloseableHttpClient client = HttpClientBuilder.create().build(); ) {
|
|
||||||
StringBuilder query = new StringBuilder();
|
|
||||||
query.append("format=rss&appid=").append(appId)
|
|
||||||
.append("&count=").append(maxResults);
|
|
||||||
if (title != null) {
|
|
||||||
query.append("&title=").append(URLEncoder.encode(title, "UTF-8"));
|
|
||||||
}
|
|
||||||
if (author != null) {
|
|
||||||
query.append("&author=").append(URLEncoder.encode(author, "UTF-8"));
|
|
||||||
}
|
|
||||||
if (year != -1) {
|
|
||||||
query.append("&year_from=").append(String.valueOf(year));
|
|
||||||
query.append("&year_to=").append(String.valueOf(year));
|
|
||||||
}
|
|
||||||
|
|
||||||
RequestConfig requestConfig = RequestConfig.custom()
|
|
||||||
.setConnectTimeout(timeout)
|
|
||||||
.build();
|
|
||||||
method = new HttpGet("http://ci.nii.ac.jp/opensearch/search?" + query.toString());
|
|
||||||
method.setConfig(requestConfig);
|
|
||||||
|
|
||||||
// Execute the method.
|
|
||||||
HttpResponse response = client.execute(method);
|
|
||||||
StatusLine statusLine = response.getStatusLine();
|
|
||||||
int statusCode = statusLine.getStatusCode();
|
|
||||||
if (statusCode != HttpStatus.SC_OK) {
|
|
||||||
if (statusCode == HttpStatus.SC_BAD_REQUEST) {
|
|
||||||
throw new RuntimeException("CiNii OpenSearch query is not valid");
|
|
||||||
} else {
|
|
||||||
throw new RuntimeException("CiNii OpenSearch call failed: "
|
|
||||||
+ statusLine);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
|
||||||
.newInstance();
|
|
||||||
factory.setValidating(false);
|
|
||||||
factory.setIgnoringComments(true);
|
|
||||||
factory.setIgnoringElementContentWhitespace(true);
|
|
||||||
// disallow DTD parsing to ensure no XXE attacks can occur.
|
|
||||||
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
|
|
||||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
|
||||||
|
|
||||||
DocumentBuilder db = factory.newDocumentBuilder();
|
|
||||||
Document inDoc = db.parse(response.getEntity().getContent());
|
|
||||||
|
|
||||||
Element xmlRoot = inDoc.getDocumentElement();
|
|
||||||
List<Element> items = XMLUtils.getElementList(xmlRoot, "item");
|
|
||||||
|
|
||||||
int url_len = "http://ci.nii.ac.jp/naid/".length();
|
|
||||||
for (Element item : items) {
|
|
||||||
String about = item.getAttribute("rdf:about");
|
|
||||||
if (about.length() > url_len) {
|
|
||||||
ids.add(about.substring(url_len));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ids;
|
|
||||||
} catch (Exception e) {
|
|
||||||
throw new RuntimeException(
|
|
||||||
"CiNii OpenSearch results is not valid or not exist");
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
if (method != null) {
|
|
||||||
method.releaseConnection();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,225 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.MutableRecord;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.StringValue;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.dspace.app.util.XMLUtils;
|
|
||||||
import org.dspace.submit.util.SubmissionLookupPublication;
|
|
||||||
import org.w3c.dom.Element;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Keiji Suzuki
|
|
||||||
*/
|
|
||||||
public class CiNiiUtils {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Default constructor
|
|
||||||
*/
|
|
||||||
private CiNiiUtils() { }
|
|
||||||
|
|
||||||
public static Record convertCiNiiDomToRecord(Element xmlRoot) {
|
|
||||||
MutableRecord record = new SubmissionLookupPublication("");
|
|
||||||
|
|
||||||
List<Element> list = XMLUtils.getElementList(xmlRoot, "rdf:Description");
|
|
||||||
// Valid CiNii record should have three rdf:Description elements
|
|
||||||
if (list.size() < 3) {
|
|
||||||
return record;
|
|
||||||
}
|
|
||||||
|
|
||||||
Element description_ja = list.get(0); // Japanese description
|
|
||||||
Element description_en = list.get(1); // English description
|
|
||||||
// Element description3 = list.get(2); // Authors information: NOT USE here
|
|
||||||
|
|
||||||
String language = XMLUtils.getElementValue(description_ja, "dc:language");
|
|
||||||
language = language != null ? language.toLowerCase() : "ja";
|
|
||||||
record.addValue("language", new StringValue(language));
|
|
||||||
|
|
||||||
if ("ja".equals(language) || "jpn".equals(language)) {
|
|
||||||
String title = XMLUtils.getElementValue(description_ja, "dc:title");
|
|
||||||
if (title != null) {
|
|
||||||
record.addValue("title", new StringValue(title));
|
|
||||||
}
|
|
||||||
String titleAlternative = XMLUtils.getElementValue(description_en, "dc:title");
|
|
||||||
if (titleAlternative != null) {
|
|
||||||
record.addValue("titleAlternative", new StringValue(titleAlternative));
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Value> authors = getAuthors(description_ja);
|
|
||||||
if (authors.size() > 0) {
|
|
||||||
record.addField("authors", authors);
|
|
||||||
}
|
|
||||||
List<Value> authorAlternative = getAuthors(description_en);
|
|
||||||
if (authorAlternative.size() > 0) {
|
|
||||||
record.addField("auhtorAlternative", authorAlternative);
|
|
||||||
}
|
|
||||||
|
|
||||||
String publisher = XMLUtils.getElementValue(description_ja, "dc:publisher");
|
|
||||||
if (publisher != null) {
|
|
||||||
record.addValue("publisher", new StringValue(publisher));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
String title = XMLUtils.getElementValue(description_en, "dc:title");
|
|
||||||
if (title != null) {
|
|
||||||
record.addValue("title", new StringValue(title));
|
|
||||||
}
|
|
||||||
String titleAlternative = XMLUtils.getElementValue(description_ja, "dc:title");
|
|
||||||
if (titleAlternative != null) {
|
|
||||||
record.addValue("titleAlternative", new StringValue(titleAlternative));
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Value> authors = getAuthors(description_en);
|
|
||||||
if (authors.size() > 0) {
|
|
||||||
record.addField("authors", authors);
|
|
||||||
}
|
|
||||||
List<Value> authorAlternative = getAuthors(description_ja);
|
|
||||||
if (authorAlternative.size() > 0) {
|
|
||||||
record.addField("authorAlternative", authorAlternative);
|
|
||||||
}
|
|
||||||
|
|
||||||
String publisher = XMLUtils.getElementValue(description_en, "dc:publisher");
|
|
||||||
if (publisher != null) {
|
|
||||||
record.addValue("publisher", new StringValue(publisher));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
String abstract_ja = XMLUtils.getElementValue(description_ja, "dc:description");
|
|
||||||
String abstract_en = XMLUtils.getElementValue(description_en, "dc:description");
|
|
||||||
if (abstract_ja != null && abstract_en != null) {
|
|
||||||
List<Value> description = new LinkedList<Value>();
|
|
||||||
description.add(new StringValue(abstract_ja));
|
|
||||||
description.add(new StringValue(abstract_en));
|
|
||||||
record.addField("description", description);
|
|
||||||
} else if (abstract_ja != null) {
|
|
||||||
record.addValue("description", new StringValue(abstract_ja));
|
|
||||||
} else if (abstract_en != null) {
|
|
||||||
record.addValue("description", new StringValue(abstract_en));
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Value> subjects = getSubjects(description_ja);
|
|
||||||
subjects.addAll(getSubjects(description_en));
|
|
||||||
if (subjects.size() > 0) {
|
|
||||||
record.addField("subjects", subjects);
|
|
||||||
}
|
|
||||||
|
|
||||||
String journal_j = XMLUtils.getElementValue(description_ja, "prism:publicationName");
|
|
||||||
String journal_e = XMLUtils.getElementValue(description_en, "prism:publicationName");
|
|
||||||
if (journal_j != null && journal_e != null) {
|
|
||||||
record.addValue("journal", new StringValue(journal_j + " = " + journal_e));
|
|
||||||
} else if (journal_j != null) {
|
|
||||||
|
|
||||||
record.addValue("journal", new StringValue(journal_j));
|
|
||||||
} else if (journal_e != null) {
|
|
||||||
|
|
||||||
record.addValue("journal", new StringValue(journal_e));
|
|
||||||
}
|
|
||||||
|
|
||||||
String volume = XMLUtils.getElementValue(description_ja, "prism:volume");
|
|
||||||
if (volume != null) {
|
|
||||||
record.addValue("volume", new StringValue(volume));
|
|
||||||
}
|
|
||||||
|
|
||||||
String issue = XMLUtils.getElementValue(description_ja, "prism:number");
|
|
||||||
if (issue != null) {
|
|
||||||
record.addValue("issue", new StringValue(issue));
|
|
||||||
}
|
|
||||||
|
|
||||||
String spage = XMLUtils.getElementValue(description_ja, "prism:startingPage");
|
|
||||||
if (spage != null) {
|
|
||||||
record.addValue("spage", new StringValue(spage));
|
|
||||||
}
|
|
||||||
|
|
||||||
String epage = XMLUtils.getElementValue(description_ja, "prism:endingPage");
|
|
||||||
if (epage != null) {
|
|
||||||
record.addValue("epage", new StringValue(epage));
|
|
||||||
}
|
|
||||||
|
|
||||||
String pages = XMLUtils.getElementValue(description_ja, "prism:pageRange");
|
|
||||||
if (pages != null && spage == null) {
|
|
||||||
int pos = pages.indexOf("-");
|
|
||||||
if (pos > -1) {
|
|
||||||
spage = pages.substring(0, pos);
|
|
||||||
epage = pages.substring(pos + 1, pages.length() - pos);
|
|
||||||
if (!epage.equals("") && spage.length() > epage.length()) {
|
|
||||||
epage = spage.substring(0, spage.length() - epage.length()) + epage;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
spage = pages;
|
|
||||||
epage = "";
|
|
||||||
}
|
|
||||||
record.addValue("spage", new StringValue(spage));
|
|
||||||
if (!epage.equals("") && epage == null) {
|
|
||||||
record.addValue("epage", new StringValue(epage));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
String issn = XMLUtils.getElementValue(description_ja, "prism:issn");
|
|
||||||
if (issn != null) {
|
|
||||||
record.addValue("issn", new StringValue(issn));
|
|
||||||
}
|
|
||||||
|
|
||||||
String issued = XMLUtils.getElementValue(description_ja, "prism:publicationDate");
|
|
||||||
if (issued != null) {
|
|
||||||
record.addValue("issued", new StringValue(issued));
|
|
||||||
}
|
|
||||||
|
|
||||||
String ncid = XMLUtils.getElementValue(description_ja, "cinii:ncid");
|
|
||||||
if (ncid != null) {
|
|
||||||
record.addValue("ncid", new StringValue(ncid));
|
|
||||||
}
|
|
||||||
|
|
||||||
String naid = XMLUtils.getElementValue(description_ja, "cinii:naid");
|
|
||||||
if (naid != null) {
|
|
||||||
record.addValue("naid", new StringValue(naid));
|
|
||||||
}
|
|
||||||
|
|
||||||
return record;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Value> getAuthors(Element element) {
|
|
||||||
List<Value> authors = new LinkedList<Value>();
|
|
||||||
|
|
||||||
List<String> authorList = XMLUtils.getElementValueList(element, "dc:creator");
|
|
||||||
if (authorList != null && authorList.size() > 0) {
|
|
||||||
for (String author : authorList) {
|
|
||||||
int pos = author.indexOf(" ");
|
|
||||||
if (pos > -1) {
|
|
||||||
author = author.substring(0, pos) + "," + author.substring(pos);
|
|
||||||
}
|
|
||||||
authors.add(new StringValue(author));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return authors;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static List<Value> getSubjects(Element element) {
|
|
||||||
List<Value> subjects = new LinkedList<Value>();
|
|
||||||
|
|
||||||
List<Element> topicList = XMLUtils.getElementList(element, "foaf:topic");
|
|
||||||
String attrValue = null;
|
|
||||||
for (Element topic : topicList) {
|
|
||||||
attrValue = topic.getAttribute("dc:title");
|
|
||||||
if (StringUtils.isNotBlank(attrValue)) {
|
|
||||||
subjects.add(new StringValue(attrValue.trim()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return subjects;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
@@ -1,142 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.FileInputStream;
|
|
||||||
import java.io.FileNotFoundException;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.InputStream;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import javax.xml.parsers.DocumentBuilder;
|
|
||||||
import javax.xml.parsers.DocumentBuilderFactory;
|
|
||||||
import javax.xml.parsers.ParserConfigurationException;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataLoadingSpec;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.RecordSet;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import gr.ekt.bte.dataloader.FileDataLoader;
|
|
||||||
import gr.ekt.bte.exceptions.MalformedSourceException;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.dspace.app.util.XMLUtils;
|
|
||||||
import org.w3c.dom.Document;
|
|
||||||
import org.w3c.dom.Element;
|
|
||||||
import org.xml.sax.SAXException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class CrossRefFileDataLoader extends FileDataLoader {
|
|
||||||
|
|
||||||
Map<String, String> fieldMap; // mapping between service fields and local
|
|
||||||
// intermediate fields
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
public CrossRefFileDataLoader() {
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param filename Name of file to load ArXiv data from.
|
|
||||||
*/
|
|
||||||
public CrossRefFileDataLoader(String filename) {
|
|
||||||
super(filename);
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
* (non-Javadoc)
|
|
||||||
*
|
|
||||||
* @see gr.ekt.bte.core.DataLoader#getRecords()
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords() throws MalformedSourceException {
|
|
||||||
|
|
||||||
RecordSet recordSet = new RecordSet();
|
|
||||||
|
|
||||||
try {
|
|
||||||
InputStream inputStream = new FileInputStream(new File(filename));
|
|
||||||
|
|
||||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
|
||||||
.newInstance();
|
|
||||||
factory.setValidating(false);
|
|
||||||
factory.setIgnoringComments(true);
|
|
||||||
factory.setIgnoringElementContentWhitespace(true);
|
|
||||||
|
|
||||||
DocumentBuilder db = factory.newDocumentBuilder();
|
|
||||||
Document inDoc = db.parse(inputStream);
|
|
||||||
|
|
||||||
Element xmlRoot = inDoc.getDocumentElement();
|
|
||||||
Element queryResult = XMLUtils.getSingleElement(xmlRoot, "query_result");
|
|
||||||
Element body = XMLUtils.getSingleElement(queryResult, "body");
|
|
||||||
Element dataRoot = XMLUtils.getSingleElement(body, "query");
|
|
||||||
Record record = CrossRefUtils.convertCrossRefDomToRecord(dataRoot);
|
|
||||||
recordSet.addRecord(convertFields(record));
|
|
||||||
|
|
||||||
} catch (FileNotFoundException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
} catch (ParserConfigurationException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
} catch (SAXException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
} catch (IOException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
}
|
|
||||||
|
|
||||||
return recordSet;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
* (non-Javadoc)
|
|
||||||
*
|
|
||||||
* @see
|
|
||||||
* gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec)
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords(DataLoadingSpec spec)
|
|
||||||
throws MalformedSourceException {
|
|
||||||
if (spec.getOffset() > 0) {
|
|
||||||
return new RecordSet();
|
|
||||||
}
|
|
||||||
return getRecords();
|
|
||||||
}
|
|
||||||
|
|
||||||
public Record convertFields(Record publication) {
|
|
||||||
for (String fieldName : fieldMap.keySet()) {
|
|
||||||
String md = null;
|
|
||||||
if (fieldMap != null) {
|
|
||||||
md = this.fieldMap.get(fieldName);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (StringUtils.isBlank(md)) {
|
|
||||||
continue;
|
|
||||||
} else {
|
|
||||||
md = md.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (publication.isMutable()) {
|
|
||||||
List<Value> values = publication.getValues(fieldName);
|
|
||||||
publication.makeMutable().removeField(fieldName);
|
|
||||||
publication.makeMutable().addField(md, values);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return publication;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setFieldMap(Map<String, String> fieldMap) {
|
|
||||||
this.fieldMap = fieldMap;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,112 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
|
||||||
import javax.xml.parsers.ParserConfigurationException;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import org.apache.http.HttpException;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.jdom.JDOMException;
|
|
||||||
import org.xml.sax.SAXException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class CrossRefOnlineDataLoader extends NetworkSubmissionLookupDataLoader {
|
|
||||||
protected CrossRefService crossrefService = new CrossRefService();
|
|
||||||
|
|
||||||
protected boolean searchProvider = true;
|
|
||||||
|
|
||||||
protected String apiKey = null;
|
|
||||||
protected int maxResults = 10;
|
|
||||||
|
|
||||||
public void setSearchProvider(boolean searchProvider) {
|
|
||||||
this.searchProvider = searchProvider;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setCrossrefService(CrossRefService crossrefService) {
|
|
||||||
this.crossrefService = crossrefService;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<String> getSupportedIdentifiers() {
|
|
||||||
return Arrays.asList(new String[] {DOI});
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<Record> getByIdentifier(Context context,
|
|
||||||
Map<String, Set<String>> keys) throws HttpException, IOException {
|
|
||||||
if (keys != null && keys.containsKey(DOI)) {
|
|
||||||
Set<String> dois = keys.get(DOI);
|
|
||||||
List<Record> items = null;
|
|
||||||
List<Record> results = new ArrayList<Record>();
|
|
||||||
|
|
||||||
if (getApiKey() == null) {
|
|
||||||
throw new RuntimeException("No CrossRef API key is specified!");
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
items = crossrefService.search(context, dois, getApiKey());
|
|
||||||
} catch (JDOMException e) {
|
|
||||||
throw new RuntimeException(e.getMessage(), e);
|
|
||||||
} catch (ParserConfigurationException e) {
|
|
||||||
throw new RuntimeException(e.getMessage(), e);
|
|
||||||
} catch (SAXException e) {
|
|
||||||
throw new RuntimeException(e.getMessage(), e);
|
|
||||||
}
|
|
||||||
for (Record record : items) {
|
|
||||||
results.add(convertFields(record));
|
|
||||||
}
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<Record> search(Context context, String title, String author,
|
|
||||||
int year) throws HttpException, IOException {
|
|
||||||
if (getApiKey() == null) {
|
|
||||||
throw new RuntimeException("No CrossRef API key is specified!");
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Record> items = crossrefService.search(context, title, author,
|
|
||||||
year, getMaxResults(), getApiKey());
|
|
||||||
return items;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isSearchProvider() {
|
|
||||||
return searchProvider;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getApiKey() {
|
|
||||||
return apiKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setApiKey(String apiKey) {
|
|
||||||
this.apiKey = apiKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
public int getMaxResults() {
|
|
||||||
return maxResults;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setMaxResults(int maxResults) {
|
|
||||||
this.maxResults = maxResults;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,204 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.lang.reflect.Type;
|
|
||||||
import java.net.URISyntaxException;
|
|
||||||
import java.nio.charset.StandardCharsets;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashSet;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
|
||||||
import javax.xml.parsers.DocumentBuilder;
|
|
||||||
import javax.xml.parsers.DocumentBuilderFactory;
|
|
||||||
import javax.xml.parsers.ParserConfigurationException;
|
|
||||||
|
|
||||||
import com.google.gson.Gson;
|
|
||||||
import com.google.gson.reflect.TypeToken;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import org.apache.commons.io.IOUtils;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.apache.http.HttpException;
|
|
||||||
import org.apache.http.HttpResponse;
|
|
||||||
import org.apache.http.HttpStatus;
|
|
||||||
import org.apache.http.StatusLine;
|
|
||||||
import org.apache.http.client.HttpClient;
|
|
||||||
import org.apache.http.client.config.RequestConfig;
|
|
||||||
import org.apache.http.client.methods.HttpGet;
|
|
||||||
import org.apache.http.client.utils.URIBuilder;
|
|
||||||
import org.apache.http.impl.client.CloseableHttpClient;
|
|
||||||
import org.apache.http.impl.client.HttpClientBuilder;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.app.util.XMLUtils;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.core.LogManager;
|
|
||||||
import org.jdom.JDOMException;
|
|
||||||
import org.w3c.dom.Document;
|
|
||||||
import org.w3c.dom.Element;
|
|
||||||
import org.xml.sax.SAXException;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class CrossRefService {
|
|
||||||
|
|
||||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(CrossRefService.class);
|
|
||||||
|
|
||||||
protected int timeout = 1000;
|
|
||||||
|
|
||||||
public void setTimeout(int timeout) {
|
|
||||||
this.timeout = timeout;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<Record> search(Context context, Set<String> dois, String apiKey)
|
|
||||||
throws HttpException, IOException, JDOMException,
|
|
||||||
ParserConfigurationException, SAXException {
|
|
||||||
List<Record> results = new ArrayList<>();
|
|
||||||
if (dois != null && dois.size() > 0) {
|
|
||||||
for (String record : dois) {
|
|
||||||
try {
|
|
||||||
HttpGet method = null;
|
|
||||||
try {
|
|
||||||
HttpClient client = HttpClientBuilder.create().build();
|
|
||||||
|
|
||||||
try {
|
|
||||||
URIBuilder uriBuilder = new URIBuilder(
|
|
||||||
"http://www.crossref.org/openurl/");
|
|
||||||
uriBuilder.addParameter("pid", apiKey);
|
|
||||||
uriBuilder.addParameter("noredirect", "true");
|
|
||||||
uriBuilder.addParameter("id", record);
|
|
||||||
|
|
||||||
method = new HttpGet(uriBuilder.build());
|
|
||||||
RequestConfig requestConfig = RequestConfig.custom()
|
|
||||||
.setConnectTimeout(timeout)
|
|
||||||
.build();
|
|
||||||
method.setConfig(requestConfig);
|
|
||||||
} catch (URISyntaxException ex) {
|
|
||||||
throw new HttpException("Request not sent", ex);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Execute the method.
|
|
||||||
HttpResponse response = client.execute(method);
|
|
||||||
StatusLine statusLine = response.getStatusLine();
|
|
||||||
int statusCode = statusLine.getStatusCode();
|
|
||||||
|
|
||||||
if (statusCode != HttpStatus.SC_OK) {
|
|
||||||
throw new RuntimeException("Http call failed: "
|
|
||||||
+ statusLine);
|
|
||||||
}
|
|
||||||
|
|
||||||
Record crossitem;
|
|
||||||
try {
|
|
||||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
|
||||||
.newInstance();
|
|
||||||
factory.setValidating(false);
|
|
||||||
factory.setIgnoringComments(true);
|
|
||||||
factory.setIgnoringElementContentWhitespace(true);
|
|
||||||
// disallow DTD parsing to ensure no XXE attacks can occur.
|
|
||||||
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
|
|
||||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
|
||||||
|
|
||||||
DocumentBuilder db = factory
|
|
||||||
.newDocumentBuilder();
|
|
||||||
Document inDoc = db.parse(response.getEntity().getContent());
|
|
||||||
|
|
||||||
Element xmlRoot = inDoc.getDocumentElement();
|
|
||||||
Element queryResult = XMLUtils.getSingleElement(xmlRoot, "query_result");
|
|
||||||
Element body = XMLUtils.getSingleElement(queryResult, "body");
|
|
||||||
Element dataRoot = XMLUtils.getSingleElement(body, "query");
|
|
||||||
|
|
||||||
crossitem = CrossRefUtils
|
|
||||||
.convertCrossRefDomToRecord(dataRoot);
|
|
||||||
results.add(crossitem);
|
|
||||||
} catch (Exception e) {
|
|
||||||
log.warn(LogManager
|
|
||||||
.getHeader(
|
|
||||||
context,
|
|
||||||
"retrieveRecordDOI",
|
|
||||||
record
|
|
||||||
+ " DOI is not valid or not exist: "
|
|
||||||
+ e.getMessage()));
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
if (method != null) {
|
|
||||||
method.releaseConnection();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (RuntimeException rt) {
|
|
||||||
log.error(rt.getMessage(), rt);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<Record> search(Context context, String title, String authors,
|
|
||||||
int year, int count, String apiKey) throws IOException, HttpException {
|
|
||||||
HttpGet method = null;
|
|
||||||
try ( CloseableHttpClient client = HttpClientBuilder.create().build(); ) {
|
|
||||||
|
|
||||||
URIBuilder uriBuilder = new URIBuilder("http://search.labs.crossref.org/dois");
|
|
||||||
|
|
||||||
StringBuilder sb = new StringBuilder();
|
|
||||||
if (StringUtils.isNotBlank(title)) {
|
|
||||||
sb.append(title);
|
|
||||||
}
|
|
||||||
sb.append(" ");
|
|
||||||
if (StringUtils.isNotBlank(authors)) {
|
|
||||||
sb.append(authors);
|
|
||||||
}
|
|
||||||
String q = sb.toString().trim();
|
|
||||||
uriBuilder.addParameter("q", q);
|
|
||||||
|
|
||||||
uriBuilder.addParameter("year", year != -1 ? String.valueOf(year) : "");
|
|
||||||
uriBuilder.addParameter("rows", count != -1 ? String.valueOf(count) : "");
|
|
||||||
|
|
||||||
method = new HttpGet(uriBuilder.build());
|
|
||||||
RequestConfig requestConfig = RequestConfig.custom()
|
|
||||||
.setConnectTimeout(timeout)
|
|
||||||
.build();
|
|
||||||
method.setConfig(requestConfig);
|
|
||||||
|
|
||||||
// Execute the method.
|
|
||||||
HttpResponse response = client.execute(method);
|
|
||||||
StatusLine statusLine = response.getStatusLine();
|
|
||||||
int statusCode = statusLine.getStatusCode();
|
|
||||||
|
|
||||||
if (statusCode != HttpStatus.SC_OK) {
|
|
||||||
throw new RuntimeException("Http call failed:: "
|
|
||||||
+ statusLine);
|
|
||||||
}
|
|
||||||
|
|
||||||
Gson gson = new Gson();
|
|
||||||
Type listType = new TypeToken<ArrayList<Map>>() {
|
|
||||||
}.getType();
|
|
||||||
List<Map> json = gson.fromJson(
|
|
||||||
IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8),
|
|
||||||
listType);
|
|
||||||
Set<String> dois = new HashSet<>();
|
|
||||||
for (Map r : json) {
|
|
||||||
dois.add(SubmissionLookupUtils.normalizeDOI((String) r
|
|
||||||
.get("doi")));
|
|
||||||
}
|
|
||||||
method.releaseConnection();
|
|
||||||
|
|
||||||
return search(context, dois, apiKey);
|
|
||||||
} catch (Exception e) {
|
|
||||||
throw new RuntimeException(e.getMessage(), e);
|
|
||||||
} finally {
|
|
||||||
if (method != null) {
|
|
||||||
method.releaseConnection();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,216 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.MutableRecord;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.StringValue;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.dspace.app.util.XMLUtils;
|
|
||||||
import org.dspace.submit.util.SubmissionLookupPublication;
|
|
||||||
import org.w3c.dom.Element;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class CrossRefUtils {
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Default constructor
|
|
||||||
*/
|
|
||||||
private CrossRefUtils() { }
|
|
||||||
|
|
||||||
public static Record convertCrossRefDomToRecord(Element dataRoot) {
|
|
||||||
MutableRecord record = new SubmissionLookupPublication("");
|
|
||||||
|
|
||||||
String status = dataRoot.getAttribute("status");
|
|
||||||
if (!"resolved".equals(status)) {
|
|
||||||
String msg = XMLUtils.getElementValue(dataRoot, "msg");
|
|
||||||
String exMsg = status + " - " + msg;
|
|
||||||
throw new RuntimeException(exMsg);
|
|
||||||
}
|
|
||||||
|
|
||||||
String doi = XMLUtils.getElementValue(dataRoot, "doi");
|
|
||||||
if (doi != null) {
|
|
||||||
record.addValue("doi", new StringValue(doi));
|
|
||||||
}
|
|
||||||
|
|
||||||
String itemType = doi != null ? XMLUtils.getElementAttribute(dataRoot,
|
|
||||||
"doi", "type") : "unspecified";
|
|
||||||
if (itemType != null) {
|
|
||||||
record.addValue("doiType", new StringValue(itemType));
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Element> identifier = XMLUtils.getElementList(dataRoot, "issn");
|
|
||||||
for (Element ident : identifier) {
|
|
||||||
if ("print".equalsIgnoreCase(ident.getAttribute("type"))
|
|
||||||
|| StringUtils.isNotBlank(ident.getAttribute("type"))) {
|
|
||||||
String issn = ident.getTextContent().trim();
|
|
||||||
if (issn != null) {
|
|
||||||
record.addValue("printISSN", new StringValue(issn));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
String eissn = ident.getTextContent().trim();
|
|
||||||
if (eissn != null) {
|
|
||||||
record.addValue("electronicISSN", new StringValue(eissn));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Element> identifierisbn = XMLUtils.getElementList(dataRoot, "isbn");
|
|
||||||
for (Element ident : identifierisbn) {
|
|
||||||
if ("print".equalsIgnoreCase(ident.getAttribute("type"))
|
|
||||||
|| StringUtils.isNotBlank(ident.getAttribute("type"))) {
|
|
||||||
String issn = ident.getTextContent().trim();
|
|
||||||
if (issn != null) {
|
|
||||||
record.addValue("printISBN", new StringValue(issn));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
String eissn = ident.getTextContent().trim();
|
|
||||||
if (eissn != null) {
|
|
||||||
record.addValue("electronicISBN", new StringValue(eissn));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
String editionNumber = XMLUtils.getElementValue(dataRoot,
|
|
||||||
"editionNumber");
|
|
||||||
if (editionNumber != null) {
|
|
||||||
record.addValue("editionNumber", new StringValue(editionNumber));
|
|
||||||
}
|
|
||||||
|
|
||||||
String volume = XMLUtils.getElementValue(dataRoot, "volume");
|
|
||||||
if (volume != null) {
|
|
||||||
record.addValue("volume", new StringValue(volume));
|
|
||||||
}
|
|
||||||
|
|
||||||
String issue = XMLUtils.getElementValue(dataRoot, "issue");
|
|
||||||
if (issue != null) {
|
|
||||||
record.addValue("issue", new StringValue(issue));
|
|
||||||
}
|
|
||||||
|
|
||||||
String year = XMLUtils.getElementValue(dataRoot, "year");
|
|
||||||
if (year != null) {
|
|
||||||
record.addValue("year", new StringValue(year));
|
|
||||||
}
|
|
||||||
|
|
||||||
String firstPage = XMLUtils.getElementValue(dataRoot, "first_page");
|
|
||||||
if (firstPage != null) {
|
|
||||||
record.addValue("firstPage", new StringValue(firstPage));
|
|
||||||
}
|
|
||||||
|
|
||||||
String lastPage = XMLUtils.getElementValue(dataRoot, "last_page");
|
|
||||||
if (lastPage != null) {
|
|
||||||
record.addValue("lastPage", new StringValue(lastPage));
|
|
||||||
}
|
|
||||||
|
|
||||||
String seriesTitle = XMLUtils.getElementValue(dataRoot, "series_title");
|
|
||||||
if (seriesTitle != null) {
|
|
||||||
record.addValue("seriesTitle", new StringValue(seriesTitle));
|
|
||||||
}
|
|
||||||
|
|
||||||
String journalTitle = XMLUtils.getElementValue(dataRoot,
|
|
||||||
"journal_title");
|
|
||||||
if (journalTitle != null) {
|
|
||||||
record.addValue("journalTitle", new StringValue(journalTitle));
|
|
||||||
}
|
|
||||||
|
|
||||||
String volumeTitle = XMLUtils.getElementValue(dataRoot, "volume_title");
|
|
||||||
if (volumeTitle != null) {
|
|
||||||
record.addValue("volumeTitle", new StringValue(volumeTitle));
|
|
||||||
}
|
|
||||||
|
|
||||||
String articleTitle = XMLUtils.getElementValue(dataRoot,
|
|
||||||
"article_title");
|
|
||||||
if (articleTitle != null) {
|
|
||||||
record.addValue("articleTitle", new StringValue(articleTitle));
|
|
||||||
}
|
|
||||||
|
|
||||||
String publicationType = XMLUtils.getElementValue(dataRoot,
|
|
||||||
"pubblication_type");
|
|
||||||
if (publicationType != null) {
|
|
||||||
record.addValue("publicationType", new StringValue(publicationType));
|
|
||||||
}
|
|
||||||
|
|
||||||
List<String[]> authors = new LinkedList<String[]>();
|
|
||||||
List<String[]> editors = new LinkedList<String[]>();
|
|
||||||
List<String[]> translators = new LinkedList<String[]>();
|
|
||||||
List<String[]> chairs = new LinkedList<String[]>();
|
|
||||||
|
|
||||||
List<Element> contributors = XMLUtils.getElementList(dataRoot,
|
|
||||||
"contributors");
|
|
||||||
List<Element> contributor = null;
|
|
||||||
if (contributors != null && contributors.size() > 0) {
|
|
||||||
contributor = XMLUtils.getElementList(contributors.get(0),
|
|
||||||
"contributor");
|
|
||||||
|
|
||||||
for (Element contrib : contributor) {
|
|
||||||
|
|
||||||
String givenName = XMLUtils.getElementValue(contrib,
|
|
||||||
"given_name");
|
|
||||||
String surname = XMLUtils.getElementValue(contrib, "surname");
|
|
||||||
|
|
||||||
if ("editor".equalsIgnoreCase(contrib
|
|
||||||
.getAttribute("contributor_role"))) {
|
|
||||||
editors.add(new String[] {givenName, surname});
|
|
||||||
} else if ("chair".equalsIgnoreCase(contrib
|
|
||||||
.getAttribute("contributor_role"))) {
|
|
||||||
chairs.add(new String[] {givenName, surname});
|
|
||||||
} else if ("translator".equalsIgnoreCase(contrib
|
|
||||||
.getAttribute("contributor_role"))) {
|
|
||||||
translators.add(new String[] {givenName, surname});
|
|
||||||
} else {
|
|
||||||
authors.add(new String[] {givenName, surname});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (authors.size() > 0) {
|
|
||||||
List<Value> values = new LinkedList<Value>();
|
|
||||||
for (String[] sArray : authors) {
|
|
||||||
values.add(new StringValue(sArray[1] + ", " + sArray[0]));
|
|
||||||
}
|
|
||||||
record.addField("authors", values);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (editors.size() > 0) {
|
|
||||||
List<Value> values = new LinkedList<Value>();
|
|
||||||
for (String[] sArray : editors) {
|
|
||||||
values.add(new StringValue(sArray[1] + ", " + sArray[0]));
|
|
||||||
}
|
|
||||||
record.addField("editors", values);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (translators.size() > 0) {
|
|
||||||
List<Value> values = new LinkedList<Value>();
|
|
||||||
for (String[] sArray : translators) {
|
|
||||||
values.add(new StringValue(sArray[1] + ", " + sArray[0]));
|
|
||||||
}
|
|
||||||
record.addField("translators", values);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (chairs.size() > 0) {
|
|
||||||
List<Value> values = new LinkedList<Value>();
|
|
||||||
for (String[] sArray : chairs) {
|
|
||||||
values.add(new StringValue(sArray[1] + ", " + sArray[0]));
|
|
||||||
}
|
|
||||||
record.addField("chairs", values);
|
|
||||||
}
|
|
||||||
return record;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,364 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.sql.SQLException;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.HashSet;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataOutputSpec;
|
|
||||||
import gr.ekt.bte.core.OutputGenerator;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.RecordSet;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.apache.logging.log4j.LogManager;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.app.util.DCInput;
|
|
||||||
import org.dspace.app.util.DCInputSet;
|
|
||||||
import org.dspace.app.util.DCInputsReader;
|
|
||||||
import org.dspace.app.util.DCInputsReaderException;
|
|
||||||
import org.dspace.authorize.AuthorizeException;
|
|
||||||
import org.dspace.content.Collection;
|
|
||||||
import org.dspace.content.Item;
|
|
||||||
import org.dspace.content.MetadataField;
|
|
||||||
import org.dspace.content.MetadataSchema;
|
|
||||||
import org.dspace.content.WorkspaceItem;
|
|
||||||
import org.dspace.content.service.ItemService;
|
|
||||||
import org.dspace.content.service.MetadataFieldService;
|
|
||||||
import org.dspace.content.service.MetadataSchemaService;
|
|
||||||
import org.dspace.content.service.WorkspaceItemService;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.submit.util.ItemSubmissionLookupDTO;
|
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class DSpaceWorkspaceItemOutputGenerator implements OutputGenerator {
|
|
||||||
|
|
||||||
private static Logger log = LogManager.getLogger(DSpaceWorkspaceItemOutputGenerator.class);
|
|
||||||
|
|
||||||
protected Context context;
|
|
||||||
|
|
||||||
protected String formName;
|
|
||||||
|
|
||||||
protected List<WorkspaceItem> witems;
|
|
||||||
|
|
||||||
protected ItemSubmissionLookupDTO dto;
|
|
||||||
|
|
||||||
protected Collection collection;
|
|
||||||
|
|
||||||
Map<String, String> outputMap;
|
|
||||||
|
|
||||||
protected List<String> extraMetadataToKeep;
|
|
||||||
|
|
||||||
@Autowired(required = true)
|
|
||||||
protected ItemService itemService;
|
|
||||||
@Autowired(required = true)
|
|
||||||
protected MetadataFieldService metadataFieldService;
|
|
||||||
@Autowired(required = true)
|
|
||||||
protected MetadataSchemaService metadataSchemaService;
|
|
||||||
@Autowired(required = true)
|
|
||||||
protected WorkspaceItemService workspaceItemService;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<String> generateOutput(RecordSet recordSet) {
|
|
||||||
|
|
||||||
log.info("BTE OutputGenerator started. Records to output: "
|
|
||||||
+ recordSet.getRecords().size());
|
|
||||||
|
|
||||||
// Printing debug message
|
|
||||||
String totalString = "";
|
|
||||||
for (Record record : recordSet.getRecords()) {
|
|
||||||
totalString += SubmissionLookupUtils.getPrintableString(record)
|
|
||||||
+ "\n";
|
|
||||||
}
|
|
||||||
log.debug("Records to output:\n" + totalString);
|
|
||||||
|
|
||||||
witems = new ArrayList<WorkspaceItem>();
|
|
||||||
|
|
||||||
for (Record rec : recordSet.getRecords()) {
|
|
||||||
try {
|
|
||||||
WorkspaceItem wi = workspaceItemService.create(context, collection,
|
|
||||||
true);
|
|
||||||
merge(formName, wi.getItem(), rec);
|
|
||||||
|
|
||||||
witems.add(wi);
|
|
||||||
|
|
||||||
} catch (AuthorizeException e) {
|
|
||||||
log.error(e.getMessage(), e);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
log.error(e.getMessage(), e);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return new ArrayList<String>();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<String> generateOutput(RecordSet records, DataOutputSpec spec) {
|
|
||||||
return generateOutput(records);
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<WorkspaceItem> getWitems() {
|
|
||||||
return witems;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setContext(Context context) {
|
|
||||||
this.context = context;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setFormName(String formName) {
|
|
||||||
this.formName = formName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setDto(ItemSubmissionLookupDTO dto) {
|
|
||||||
this.dto = dto;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setOutputMap(Map<String, String> outputMap) {
|
|
||||||
// Reverse the key-value pairs
|
|
||||||
this.outputMap = new HashMap<String, String>();
|
|
||||||
for (String key : outputMap.keySet()) {
|
|
||||||
this.outputMap.put(outputMap.get(key), key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setCollection(Collection collection) {
|
|
||||||
this.collection = collection;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setExtraMetadataToKeep(List<String> extraMetadataToKeep) {
|
|
||||||
this.extraMetadataToKeep = extraMetadataToKeep;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Methods
|
|
||||||
public void merge(String formName, Item item, Record record) {
|
|
||||||
try {
|
|
||||||
Record itemLookup = record;
|
|
||||||
|
|
||||||
Set<String> addedMetadata = new HashSet<String>();
|
|
||||||
for (String field : itemLookup.getFields()) {
|
|
||||||
String metadata = getMetadata(formName, itemLookup, field);
|
|
||||||
if (StringUtils.isBlank(metadata)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (itemService.getMetadataByMetadataString(item, metadata).size() == 0
|
|
||||||
|| addedMetadata.contains(metadata)) {
|
|
||||||
addedMetadata.add(metadata);
|
|
||||||
String[] md = splitMetadata(metadata);
|
|
||||||
if (isValidMetadata(formName, md)) { // if in extra metadata or in the spefific form
|
|
||||||
List<Value> values = itemLookup.getValues(field);
|
|
||||||
if (values != null && values.size() > 0) {
|
|
||||||
if (isRepeatableMetadata(formName, md)) { // if metadata is repeatable in form
|
|
||||||
for (Value value : values) {
|
|
||||||
String[] splitValue = splitValue(value
|
|
||||||
.getAsString());
|
|
||||||
if (splitValue[3] != null) {
|
|
||||||
itemService.addMetadata(context, item, md[0], md[1], md[2],
|
|
||||||
md[3], splitValue[0],
|
|
||||||
splitValue[1],
|
|
||||||
Integer.parseInt(splitValue[2]));
|
|
||||||
} else {
|
|
||||||
itemService.addMetadata(context, item, md[0], md[1], md[2],
|
|
||||||
md[3], value.getAsString());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
String value = values.iterator().next()
|
|
||||||
.getAsString();
|
|
||||||
String[] splitValue = splitValue(value);
|
|
||||||
if (splitValue[3] != null) {
|
|
||||||
itemService.addMetadata(context, item, md[0], md[1], md[2], md[3],
|
|
||||||
splitValue[0], splitValue[1],
|
|
||||||
Integer.parseInt(splitValue[2]));
|
|
||||||
} else {
|
|
||||||
itemService.addMetadata(context, item, md[0], md[1], md[2], md[3],
|
|
||||||
value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
itemService.update(context, item);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
log.error(e.getMessage(), e);
|
|
||||||
} catch (AuthorizeException e) {
|
|
||||||
log.error(e.getMessage(), e);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
protected String getMetadata(String formName, Record itemLookup, String name) {
|
|
||||||
String type = SubmissionLookupService.getType(itemLookup);
|
|
||||||
|
|
||||||
String md = outputMap.get(type + "." + name);
|
|
||||||
if (StringUtils.isBlank(md)) {
|
|
||||||
md = outputMap.get(formName + "." + name);
|
|
||||||
if (StringUtils.isBlank(md)) {
|
|
||||||
md = outputMap.get(name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// KSTA:ToDo: Make this a modifier
|
|
||||||
if (md != null && md.contains("|")) {
|
|
||||||
String[] cond = md.trim().split("\\|");
|
|
||||||
for (int idx = 1; idx < cond.length; idx++) {
|
|
||||||
boolean temp = itemLookup.getFields().contains(cond[idx]);
|
|
||||||
if (temp) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return cond[0];
|
|
||||||
}
|
|
||||||
return md;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected String[] splitMetadata(String metadata) {
|
|
||||||
String[] mdSplit = new String[3];
|
|
||||||
if (StringUtils.isNotBlank(metadata)) {
|
|
||||||
String tmpSplit[] = metadata.split("\\.");
|
|
||||||
if (tmpSplit.length == 4) {
|
|
||||||
mdSplit = new String[4];
|
|
||||||
mdSplit[0] = tmpSplit[0];
|
|
||||||
mdSplit[1] = tmpSplit[1];
|
|
||||||
mdSplit[2] = tmpSplit[2];
|
|
||||||
mdSplit[3] = tmpSplit[3];
|
|
||||||
} else if (tmpSplit.length == 3) {
|
|
||||||
mdSplit = new String[4];
|
|
||||||
mdSplit[0] = tmpSplit[0];
|
|
||||||
mdSplit[1] = tmpSplit[1];
|
|
||||||
mdSplit[2] = tmpSplit[2];
|
|
||||||
mdSplit[3] = null;
|
|
||||||
} else if (tmpSplit.length == 2) {
|
|
||||||
mdSplit = new String[4];
|
|
||||||
mdSplit[0] = tmpSplit[0];
|
|
||||||
mdSplit[1] = tmpSplit[1];
|
|
||||||
mdSplit[2] = null;
|
|
||||||
mdSplit[3] = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return mdSplit;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected boolean isValidMetadata(String formName, String[] md) {
|
|
||||||
try {
|
|
||||||
if (extraMetadataToKeep != null
|
|
||||||
&& extraMetadataToKeep.contains(StringUtils.join(
|
|
||||||
Arrays.copyOfRange(md, 0, 3), "."))) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
return getDCInput(formName, md[0], md[1], md[2]) != null;
|
|
||||||
} catch (Exception e) {
|
|
||||||
log.error(e.getMessage(), e);
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected DCInput getDCInput(String formName, String schema, String element,
|
|
||||||
String qualifier) throws DCInputsReaderException {
|
|
||||||
List<DCInputSet> dcinputsets = new DCInputsReader().getInputsBySubmissionName(formName);
|
|
||||||
for (DCInputSet dcinputset : dcinputsets) {
|
|
||||||
for (DCInput[] dcrow : dcinputset.getFields()) {
|
|
||||||
for (DCInput dcinput : dcrow) {
|
|
||||||
if (dcinput.getSchema().equals(schema)
|
|
||||||
&& dcinput.getElement().equals(element)
|
|
||||||
&& ((dcinput.getQualifier() != null && dcinput
|
|
||||||
.getQualifier().equals(qualifier))
|
|
||||||
|| (dcinput.getQualifier() == null && qualifier == null))) {
|
|
||||||
return dcinput;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected boolean isRepeatableMetadata(String formName, String[] md) {
|
|
||||||
try {
|
|
||||||
DCInput dcinput = getDCInput(formName, md[0], md[1], md[2]);
|
|
||||||
if (dcinput != null) {
|
|
||||||
return dcinput.isRepeatable();
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
} catch (Exception e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected String[] splitValue(String value) {
|
|
||||||
String[] splitted = value
|
|
||||||
.split(SubmissionLookupService.SEPARATOR_VALUE_REGEX);
|
|
||||||
String[] result = new String[6];
|
|
||||||
result[0] = splitted[0];
|
|
||||||
result[2] = "-1";
|
|
||||||
result[3] = "-1";
|
|
||||||
result[4] = "-1";
|
|
||||||
if (splitted.length > 1) {
|
|
||||||
result[5] = "splitted";
|
|
||||||
if (StringUtils.isNotBlank(splitted[1])) {
|
|
||||||
result[1] = splitted[1];
|
|
||||||
}
|
|
||||||
if (splitted.length > 2) {
|
|
||||||
result[2] = String.valueOf(Integer.parseInt(splitted[2]));
|
|
||||||
if (splitted.length > 3) {
|
|
||||||
result[3] = String.valueOf(Integer.parseInt(splitted[3]));
|
|
||||||
if (splitted.length > 4) {
|
|
||||||
result[4] = String.valueOf(Integer
|
|
||||||
.parseInt(splitted[4]));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void makeSureMetadataExist(Context context, String schema,
|
|
||||||
String element, String qualifier) {
|
|
||||||
try {
|
|
||||||
context.turnOffAuthorisationSystem();
|
|
||||||
boolean create = false;
|
|
||||||
MetadataSchema mdschema = metadataSchemaService.find(context, schema);
|
|
||||||
MetadataField mdfield = null;
|
|
||||||
if (mdschema == null) {
|
|
||||||
mdschema = metadataSchemaService.create(context, schema,
|
|
||||||
SubmissionLookupService.SL_NAMESPACE_PREFIX + schema
|
|
||||||
);
|
|
||||||
create = true;
|
|
||||||
} else {
|
|
||||||
mdfield = metadataFieldService.findByElement(context,
|
|
||||||
mdschema, element, qualifier);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (mdfield == null) {
|
|
||||||
metadataFieldService.create(context, mdschema, element, qualifier,
|
|
||||||
"Campo utilizzato per la cache del provider submission-lookup: "
|
|
||||||
+ schema);
|
|
||||||
create = true;
|
|
||||||
}
|
|
||||||
if (create) {
|
|
||||||
context.complete();
|
|
||||||
}
|
|
||||||
context.restoreAuthSystemState();
|
|
||||||
} catch (Exception e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,64 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.AbstractModifier;
|
|
||||||
import gr.ekt.bte.core.MutableRecord;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class FieldMergeModifier extends AbstractModifier {
|
|
||||||
protected Map<String, List<String>> mergeFieldMap;
|
|
||||||
|
|
||||||
public FieldMergeModifier() {
|
|
||||||
super("FieldMergeModifier");
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Record modify(MutableRecord rec) {
|
|
||||||
if (mergeFieldMap != null) {
|
|
||||||
for (String target_field : mergeFieldMap.keySet()) {
|
|
||||||
List<String> source_fields = mergeFieldMap.get(target_field);
|
|
||||||
for (String source_field : source_fields) {
|
|
||||||
List<Value> values = rec.getValues(source_field);
|
|
||||||
if (values != null && values.size() > 0) {
|
|
||||||
for (Value value : values) {
|
|
||||||
rec.addValue(target_field, value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// rec.removeField(source_field);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return rec;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return the merge_field_map
|
|
||||||
*/
|
|
||||||
public Map<String, List<String>> getMergeFieldMap() {
|
|
||||||
return mergeFieldMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param merge_field_map the merge_field_map to set
|
|
||||||
*/
|
|
||||||
public void setMergeFieldMap(Map<String, List<String>> merge_field_map) {
|
|
||||||
this.mergeFieldMap = merge_field_map;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,78 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.MissingResourceException;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.AbstractModifier;
|
|
||||||
import gr.ekt.bte.core.MutableRecord;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.StringValue;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import org.springframework.beans.factory.InitializingBean;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Modifier to covert ISO 639-2 alpha-3 code to ISO 639-1 alpha-2 code
|
|
||||||
*
|
|
||||||
* @author Keiji Suzuki
|
|
||||||
*/
|
|
||||||
public class LanguageCodeModifier extends AbstractModifier implements InitializingBean {
|
|
||||||
protected static Map<String, String> lang3to2 = null;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void afterPropertiesSet() throws Exception {
|
|
||||||
lang3to2 = new HashMap<String, String>();
|
|
||||||
for (Locale locale : Locale.getAvailableLocales()) {
|
|
||||||
try {
|
|
||||||
lang3to2.put(locale.getISO3Language(), locale.getLanguage());
|
|
||||||
} catch (MissingResourceException e) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public LanguageCodeModifier() {
|
|
||||||
super("LanguageCodeModifier");
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Record modify(MutableRecord rec) {
|
|
||||||
List<Value> old_values = rec.getValues("language");
|
|
||||||
if (old_values == null || old_values.size() == 0) {
|
|
||||||
return rec;
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Value> new_values = new ArrayList<Value>();
|
|
||||||
for (Value value : old_values) {
|
|
||||||
String lang3 = value.getAsString();
|
|
||||||
String lang2 = lang3.length() == 3 ? getLang2(lang3) : lang3;
|
|
||||||
new_values.add(new StringValue(lang2));
|
|
||||||
}
|
|
||||||
|
|
||||||
rec.updateField("language", new_values);
|
|
||||||
|
|
||||||
return rec;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Covert ISO 639-2 alpha-3 code to ISO 639-1 alpha-2 code
|
|
||||||
*
|
|
||||||
* @param lang3 ISO 639-1 alpha-3 language code
|
|
||||||
* @return String ISO 639-1 alpha-2 language code ("other" if code is not alpha-2)
|
|
||||||
*/
|
|
||||||
protected String getLang2(String lang3) {
|
|
||||||
return lang3to2.containsKey(lang3) ? lang3to2.get(lang3) : "other";
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
@@ -1,40 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class LookupProvidersCheck {
|
|
||||||
private List<String> providersOk = new ArrayList<String>();
|
|
||||||
|
|
||||||
private List<String> providersErr = new ArrayList<String>();
|
|
||||||
|
|
||||||
public List<String> getProvidersOk() {
|
|
||||||
return providersOk;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setProvidersOk(List<String> providersOk) {
|
|
||||||
this.providersOk = providersOk;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<String> getProvidersErr() {
|
|
||||||
return providersErr;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setProvidersErr(List<String> providersErr) {
|
|
||||||
this.providersErr = providersErr;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
@@ -1,181 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.FileInputStream;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Properties;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.AbstractModifier;
|
|
||||||
import gr.ekt.bte.core.MutableRecord;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.StringValue;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.dspace.services.ConfigurationService;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class MapConverterModifier extends AbstractModifier {
|
|
||||||
|
|
||||||
protected String mappingFile; //The properties absolute filename
|
|
||||||
|
|
||||||
protected String converterNameFile; //The properties filename
|
|
||||||
|
|
||||||
protected ConfigurationService configurationService;
|
|
||||||
|
|
||||||
protected Map<String, String> mapping;
|
|
||||||
|
|
||||||
protected String defaultValue = "";
|
|
||||||
|
|
||||||
protected List<String> fieldKeys;
|
|
||||||
|
|
||||||
protected Map<String, String> regexConfig = new HashMap<String, String>();
|
|
||||||
|
|
||||||
public final String REGEX_PREFIX = "regex.";
|
|
||||||
|
|
||||||
public void init() {
|
|
||||||
this.mappingFile = configurationService.getProperty(
|
|
||||||
"dspace.dir") + File.separator + "config" + File.separator + "crosswalks" + File.separator +
|
|
||||||
converterNameFile;
|
|
||||||
|
|
||||||
this.mapping = new HashMap<String, String>();
|
|
||||||
|
|
||||||
FileInputStream fis = null;
|
|
||||||
try {
|
|
||||||
fis = new FileInputStream(new File(mappingFile));
|
|
||||||
Properties mapConfig = new Properties();
|
|
||||||
mapConfig.load(fis);
|
|
||||||
fis.close();
|
|
||||||
for (Object key : mapConfig.keySet()) {
|
|
||||||
String keyS = (String) key;
|
|
||||||
if (keyS.startsWith(REGEX_PREFIX)) {
|
|
||||||
String regex = keyS.substring(REGEX_PREFIX.length());
|
|
||||||
String regReplace = mapping.get(keyS);
|
|
||||||
if (regReplace == null) {
|
|
||||||
regReplace = "";
|
|
||||||
} else if (regReplace.equalsIgnoreCase("@ident@")) {
|
|
||||||
regReplace = "$0";
|
|
||||||
}
|
|
||||||
regexConfig.put(regex, regReplace);
|
|
||||||
}
|
|
||||||
if (mapConfig.getProperty(keyS) != null) {
|
|
||||||
mapping.put(keyS, mapConfig.getProperty(keyS));
|
|
||||||
} else {
|
|
||||||
mapping.put(keyS, "");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (Exception e) {
|
|
||||||
throw new IllegalArgumentException("", e);
|
|
||||||
} finally {
|
|
||||||
if (fis != null) {
|
|
||||||
try {
|
|
||||||
fis.close();
|
|
||||||
} catch (IOException ioe) {
|
|
||||||
// ...
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (String keyS : mapping.keySet()) {
|
|
||||||
if (keyS.startsWith(REGEX_PREFIX)) {
|
|
||||||
String regex = keyS.substring(REGEX_PREFIX.length());
|
|
||||||
String regReplace = mapping.get(keyS);
|
|
||||||
if (regReplace == null) {
|
|
||||||
regReplace = "";
|
|
||||||
} else if (regReplace.equalsIgnoreCase("@ident@")) {
|
|
||||||
regReplace = "$0";
|
|
||||||
}
|
|
||||||
regexConfig.put(regex, regReplace);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name Name of file to load ArXiv data from.
|
|
||||||
*/
|
|
||||||
public MapConverterModifier(String name) {
|
|
||||||
super(name);
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
* (non-Javadoc)
|
|
||||||
*
|
|
||||||
* @see
|
|
||||||
* gr.ekt.bte.core.AbstractModifier#modify(gr.ekt.bte.core.MutableRecord)
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Record modify(MutableRecord record) {
|
|
||||||
if (mapping != null && fieldKeys != null) {
|
|
||||||
for (String key : fieldKeys) {
|
|
||||||
List<Value> values = record.getValues(key);
|
|
||||||
|
|
||||||
if (values == null) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Value> newValues = new ArrayList<Value>();
|
|
||||||
|
|
||||||
for (Value value : values) {
|
|
||||||
String stringValue = value.getAsString();
|
|
||||||
|
|
||||||
String tmp = "";
|
|
||||||
if (mapping.containsKey(stringValue)) {
|
|
||||||
tmp = mapping.get(stringValue);
|
|
||||||
} else {
|
|
||||||
tmp = defaultValue;
|
|
||||||
for (String regex : regexConfig.keySet()) {
|
|
||||||
if (stringValue != null
|
|
||||||
&& stringValue.matches(regex)) {
|
|
||||||
tmp = stringValue.replaceAll(regex,
|
|
||||||
regexConfig.get(regex));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if ("@@ident@@".equals(tmp)) {
|
|
||||||
newValues.add(new StringValue(stringValue));
|
|
||||||
} else if (StringUtils.isNotBlank(tmp)) {
|
|
||||||
newValues.add(new StringValue(tmp));
|
|
||||||
} else {
|
|
||||||
newValues.add(new StringValue(stringValue));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
record.updateField(key, newValues);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return record;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void setFieldKeys(List<String> fieldKeys) {
|
|
||||||
this.fieldKeys = fieldKeys;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setDefaultValue(String defaultValue) {
|
|
||||||
this.defaultValue = defaultValue;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setConverterNameFile(String converterNameFile) {
|
|
||||||
this.converterNameFile = converterNameFile;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setConfigurationService(ConfigurationService configurationService) {
|
|
||||||
this.configurationService = configurationService;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,291 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.HashSet;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataLoader;
|
|
||||||
import gr.ekt.bte.core.DataLoadingSpec;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.RecordSet;
|
|
||||||
import gr.ekt.bte.core.StringValue;
|
|
||||||
import gr.ekt.bte.dataloader.FileDataLoader;
|
|
||||||
import gr.ekt.bte.exceptions.MalformedSourceException;
|
|
||||||
import org.apache.logging.log4j.LogManager;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class MultipleSubmissionLookupDataLoader implements DataLoader {
|
|
||||||
|
|
||||||
private static Logger log = LogManager.getLogger(MultipleSubmissionLookupDataLoader.class);
|
|
||||||
|
|
||||||
protected final String NOT_FOUND_DOI = "NOT-FOUND-DOI";
|
|
||||||
|
|
||||||
Map<String, DataLoader> dataloadersMap;
|
|
||||||
|
|
||||||
// Depending on these values, the multiple data loader loads data from the
|
|
||||||
// appropriate providers
|
|
||||||
Map<String, Set<String>> identifiers = null; // Searching by identifiers
|
|
||||||
// (DOI ...)
|
|
||||||
|
|
||||||
Map<String, Set<String>> searchTerms = null; // Searching by author, title,
|
|
||||||
// date
|
|
||||||
|
|
||||||
String filename = null; // Uploading file
|
|
||||||
|
|
||||||
String type = null; // the type of the upload file (bibtex, etc.)
|
|
||||||
|
|
||||||
/*
|
|
||||||
* (non-Javadoc)
|
|
||||||
*
|
|
||||||
* @see gr.ekt.bte.core.DataLoader#getRecords()
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords() throws MalformedSourceException {
|
|
||||||
|
|
||||||
RecordSet recordSet = new RecordSet();
|
|
||||||
|
|
||||||
// KSTA:ToDo: Support timeout (problematic) providers
|
|
||||||
// List<String> timeoutProviders = new ArrayList<String>();
|
|
||||||
for (String providerName : filterProviders().keySet()) {
|
|
||||||
DataLoader provider = dataloadersMap.get(providerName);
|
|
||||||
RecordSet subRecordSet = provider.getRecords();
|
|
||||||
recordSet.addAll(subRecordSet);
|
|
||||||
// Add in each record the provider name... a new provider doesn't
|
|
||||||
// need to know about it!
|
|
||||||
for (Record record : subRecordSet.getRecords()) {
|
|
||||||
if (record.isMutable()) {
|
|
||||||
record.makeMutable().addValue(
|
|
||||||
SubmissionLookupService.PROVIDER_NAME_FIELD,
|
|
||||||
new StringValue(providerName));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Question: Do we want that in case of file data loader?
|
|
||||||
// for each publication in the record set, if it has a DOI, try to find
|
|
||||||
// extra pubs from the other providers
|
|
||||||
if (searchTerms != null
|
|
||||||
|| (identifiers != null && !identifiers
|
|
||||||
.containsKey(SubmissionLookupDataLoader.DOI))) { // Extend
|
|
||||||
Map<String, Set<String>> provider2foundDOIs = new HashMap<String, Set<String>>();
|
|
||||||
List<String> foundDOIs = new ArrayList<String>();
|
|
||||||
|
|
||||||
for (Record publication : recordSet.getRecords()) {
|
|
||||||
String providerName = SubmissionLookupUtils.getFirstValue(
|
|
||||||
publication,
|
|
||||||
SubmissionLookupService.PROVIDER_NAME_FIELD);
|
|
||||||
|
|
||||||
String doi = null;
|
|
||||||
|
|
||||||
if (publication.getValues(SubmissionLookupDataLoader.DOI) != null
|
|
||||||
&& publication
|
|
||||||
.getValues(SubmissionLookupDataLoader.DOI)
|
|
||||||
.size() > 0) {
|
|
||||||
doi = publication.getValues(SubmissionLookupDataLoader.DOI)
|
|
||||||
.iterator().next().getAsString();
|
|
||||||
}
|
|
||||||
if (doi == null) {
|
|
||||||
doi = NOT_FOUND_DOI;
|
|
||||||
} else {
|
|
||||||
doi = SubmissionLookupUtils.normalizeDOI(doi);
|
|
||||||
if (!foundDOIs.contains(doi)) {
|
|
||||||
foundDOIs.add(doi);
|
|
||||||
}
|
|
||||||
Set<String> tmp = provider2foundDOIs.get(providerName);
|
|
||||||
if (tmp == null) {
|
|
||||||
tmp = new HashSet<String>();
|
|
||||||
provider2foundDOIs.put(providerName, tmp);
|
|
||||||
}
|
|
||||||
tmp.add(doi);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (String providerName : dataloadersMap.keySet()) {
|
|
||||||
DataLoader genProvider = dataloadersMap.get(providerName);
|
|
||||||
|
|
||||||
if (!(genProvider instanceof SubmissionLookupDataLoader)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
SubmissionLookupDataLoader provider = (SubmissionLookupDataLoader) genProvider;
|
|
||||||
|
|
||||||
// Provider must support DOI
|
|
||||||
if (!provider.getSupportedIdentifiers().contains(
|
|
||||||
SubmissionLookupDataLoader.DOI)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// if (evictProviders != null
|
|
||||||
// && evictProviders.contains(provider.getShortName())) {
|
|
||||||
// continue;
|
|
||||||
// }
|
|
||||||
Set<String> doiToSearch = new HashSet<String>();
|
|
||||||
Set<String> alreadyFoundDOIs = provider2foundDOIs
|
|
||||||
.get(providerName);
|
|
||||||
for (String doi : foundDOIs) {
|
|
||||||
if (alreadyFoundDOIs == null
|
|
||||||
|| !alreadyFoundDOIs.contains(doi)) {
|
|
||||||
doiToSearch.add(doi);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
List<Record> pPublications = null;
|
|
||||||
Context context = null;
|
|
||||||
try {
|
|
||||||
if (doiToSearch.size() > 0) {
|
|
||||||
context = new Context();
|
|
||||||
pPublications = provider.getByDOIs(context, doiToSearch);
|
|
||||||
}
|
|
||||||
} catch (Exception e) {
|
|
||||||
log.error(e.getMessage(), e);
|
|
||||||
} finally {
|
|
||||||
if (context != null && context.isValid()) {
|
|
||||||
context.abort();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (pPublications != null) {
|
|
||||||
for (Record rec : pPublications) {
|
|
||||||
recordSet.addRecord(rec);
|
|
||||||
if (rec.isMutable()) {
|
|
||||||
rec.makeMutable().addValue(
|
|
||||||
SubmissionLookupService.PROVIDER_NAME_FIELD,
|
|
||||||
new StringValue(providerName));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info("BTE DataLoader finished. Items loaded: "
|
|
||||||
+ recordSet.getRecords().size());
|
|
||||||
|
|
||||||
// Printing debug message
|
|
||||||
String totalString = "";
|
|
||||||
for (Record record : recordSet.getRecords()) {
|
|
||||||
totalString += SubmissionLookupUtils.getPrintableString(record)
|
|
||||||
+ "\n";
|
|
||||||
}
|
|
||||||
log.debug("Records loaded:\n" + totalString);
|
|
||||||
|
|
||||||
return recordSet;
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
* (non-Javadoc)
|
|
||||||
*
|
|
||||||
* @see
|
|
||||||
* gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec)
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords(DataLoadingSpec loadingSpec)
|
|
||||||
throws MalformedSourceException {
|
|
||||||
|
|
||||||
// Identify the end of loading
|
|
||||||
if (loadingSpec.getOffset() > 0) {
|
|
||||||
return new RecordSet();
|
|
||||||
}
|
|
||||||
|
|
||||||
return getRecords();
|
|
||||||
}
|
|
||||||
|
|
||||||
public Map<String, DataLoader> getProvidersMap() {
|
|
||||||
return dataloadersMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setDataloadersMap(Map<String, DataLoader> providersMap) {
|
|
||||||
this.dataloadersMap = providersMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setIdentifiers(Map<String, Set<String>> identifiers) {
|
|
||||||
this.identifiers = identifiers;
|
|
||||||
this.filename = null;
|
|
||||||
this.searchTerms = null;
|
|
||||||
|
|
||||||
if (dataloadersMap != null) {
|
|
||||||
for (String providerName : dataloadersMap.keySet()) {
|
|
||||||
DataLoader provider = dataloadersMap.get(providerName);
|
|
||||||
if (provider instanceof NetworkSubmissionLookupDataLoader) {
|
|
||||||
((NetworkSubmissionLookupDataLoader) provider)
|
|
||||||
.setIdentifiers(identifiers);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setSearchTerms(Map<String, Set<String>> searchTerms) {
|
|
||||||
this.searchTerms = searchTerms;
|
|
||||||
this.identifiers = null;
|
|
||||||
this.filename = null;
|
|
||||||
|
|
||||||
if (dataloadersMap != null) {
|
|
||||||
for (String providerName : dataloadersMap.keySet()) {
|
|
||||||
DataLoader provider = dataloadersMap.get(providerName);
|
|
||||||
if (provider instanceof NetworkSubmissionLookupDataLoader) {
|
|
||||||
((NetworkSubmissionLookupDataLoader) provider)
|
|
||||||
.setSearchTerms(searchTerms);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setFile(String filename, String type) {
|
|
||||||
this.filename = filename;
|
|
||||||
this.type = type;
|
|
||||||
this.identifiers = null;
|
|
||||||
this.searchTerms = null;
|
|
||||||
|
|
||||||
if (dataloadersMap != null) {
|
|
||||||
for (String providerName : dataloadersMap.keySet()) {
|
|
||||||
DataLoader provider = dataloadersMap.get(providerName);
|
|
||||||
if (provider instanceof FileDataLoader) {
|
|
||||||
((FileDataLoader) provider).setFilename(filename);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public Map<String, DataLoader> filterProviders() {
|
|
||||||
Map<String, DataLoader> result = new HashMap<String, DataLoader>();
|
|
||||||
for (String providerName : dataloadersMap.keySet()) {
|
|
||||||
DataLoader dataLoader = dataloadersMap.get(providerName);
|
|
||||||
if (searchTerms != null && identifiers == null && filename == null) {
|
|
||||||
if (dataLoader instanceof SubmissionLookupDataLoader &&
|
|
||||||
((SubmissionLookupDataLoader) dataLoader).isSearchProvider()) {
|
|
||||||
result.put(providerName, dataLoader);
|
|
||||||
}
|
|
||||||
} else if (searchTerms == null && identifiers != null && filename == null) {
|
|
||||||
if (dataLoader instanceof SubmissionLookupDataLoader) {
|
|
||||||
result.put(providerName, dataLoader);
|
|
||||||
}
|
|
||||||
} else if (searchTerms == null && identifiers == null
|
|
||||||
&& filename != null) {
|
|
||||||
if (dataLoader instanceof FileDataLoader) {
|
|
||||||
// add only the one that we are interested in
|
|
||||||
if (providerName.endsWith(type)) {
|
|
||||||
result.put(providerName, dataLoader);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,150 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.Calendar;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataLoadingSpec;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.RecordSet;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import gr.ekt.bte.exceptions.MalformedSourceException;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.apache.http.HttpException;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public abstract class NetworkSubmissionLookupDataLoader implements
|
|
||||||
SubmissionLookupDataLoader {
|
|
||||||
|
|
||||||
Map<String, Set<String>> identifiers; // Searching by identifiers (DOI ...)
|
|
||||||
|
|
||||||
Map<String, Set<String>> searchTerms; // Searching by author, title, date
|
|
||||||
|
|
||||||
Map<String, String> fieldMap; // mapping between service fields and local
|
|
||||||
// intermediate fields
|
|
||||||
|
|
||||||
String providerName;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<Record> getByDOIs(Context context, Set<String> doiToSearch)
|
|
||||||
throws HttpException, IOException {
|
|
||||||
|
|
||||||
Map<String, Set<String>> keys = new HashMap<String, Set<String>>();
|
|
||||||
keys.put(DOI, doiToSearch);
|
|
||||||
|
|
||||||
return getByIdentifier(context, keys);
|
|
||||||
}
|
|
||||||
|
|
||||||
// BTE Data Loader interface methods
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords() throws MalformedSourceException {
|
|
||||||
|
|
||||||
RecordSet recordSet = new RecordSet();
|
|
||||||
|
|
||||||
List<Record> results = null;
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (getIdentifiers() != null) { // Search by identifiers
|
|
||||||
results = getByIdentifier(null, getIdentifiers());
|
|
||||||
} else {
|
|
||||||
String title = getSearchTerms().get("title") != null ? getSearchTerms()
|
|
||||||
.get("title").iterator().next()
|
|
||||||
: null;
|
|
||||||
String authors = getSearchTerms().get("authors") != null ? getSearchTerms()
|
|
||||||
.get("authors").iterator().next()
|
|
||||||
: null;
|
|
||||||
String year = getSearchTerms().get("year") != null ? getSearchTerms()
|
|
||||||
.get("year").iterator().next()
|
|
||||||
: String.valueOf(Calendar.getInstance().get(Calendar.YEAR));
|
|
||||||
int yearInt = Integer.parseInt(year);
|
|
||||||
results = search(null, title, authors, yearInt);
|
|
||||||
}
|
|
||||||
} catch (HttpException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
} catch (IOException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (results != null) {
|
|
||||||
for (Record record : results) {
|
|
||||||
recordSet.addRecord(record);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return recordSet;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords(DataLoadingSpec arg0)
|
|
||||||
throws MalformedSourceException {
|
|
||||||
|
|
||||||
return getRecords();
|
|
||||||
}
|
|
||||||
|
|
||||||
public Map<String, Set<String>> getIdentifiers() {
|
|
||||||
return identifiers;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setIdentifiers(Map<String, Set<String>> identifiers) {
|
|
||||||
this.identifiers = identifiers;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Map<String, Set<String>> getSearchTerms() {
|
|
||||||
return searchTerms;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setSearchTerms(Map<String, Set<String>> searchTerms) {
|
|
||||||
this.searchTerms = searchTerms;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Map<String, String> getFieldMap() {
|
|
||||||
return fieldMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setFieldMap(Map<String, String> fieldMap) {
|
|
||||||
this.fieldMap = fieldMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setProviderName(String providerName) {
|
|
||||||
this.providerName = providerName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Record convertFields(Record publication) {
|
|
||||||
for (String fieldName : fieldMap.keySet()) {
|
|
||||||
String md = null;
|
|
||||||
if (fieldMap != null) {
|
|
||||||
md = this.fieldMap.get(fieldName);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (StringUtils.isBlank(md)) {
|
|
||||||
continue;
|
|
||||||
} else {
|
|
||||||
md = md.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (publication.isMutable()) {
|
|
||||||
List<Value> values = publication.getValues(fieldName);
|
|
||||||
publication.makeMutable().removeField(fieldName);
|
|
||||||
publication.makeMutable().addField(md, values);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return publication;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,75 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.AbstractModifier;
|
|
||||||
import gr.ekt.bte.core.MutableRecord;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.StringValue;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class RemoveLastDotModifier extends AbstractModifier {
|
|
||||||
|
|
||||||
List<String> fieldKeys;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name modifier name
|
|
||||||
*/
|
|
||||||
public RemoveLastDotModifier(String name) {
|
|
||||||
super(name);
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
* (non-Javadoc)
|
|
||||||
*
|
|
||||||
* @see
|
|
||||||
* gr.ekt.bte.core.AbstractModifier#modify(gr.ekt.bte.core.MutableRecord)
|
|
||||||
*/
|
|
||||||
@Override
|
|
||||||
public Record modify(MutableRecord record) {
|
|
||||||
if (fieldKeys != null) {
|
|
||||||
for (String key : fieldKeys) {
|
|
||||||
List<Value> values = record.getValues(key);
|
|
||||||
|
|
||||||
List<Value> newValues = new ArrayList<Value>();
|
|
||||||
|
|
||||||
if (values != null) {
|
|
||||||
for (Value value : values) {
|
|
||||||
String valueString = value.getAsString();
|
|
||||||
if (StringUtils.isNotBlank(valueString)
|
|
||||||
&& valueString.endsWith(".")) {
|
|
||||||
newValues.add(new StringValue(valueString
|
|
||||||
.substring(0, valueString.length() - 1)));
|
|
||||||
} else {
|
|
||||||
newValues.add(new StringValue(valueString));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
record.updateField(key, newValues);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return record;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setFieldKeys(List<String> fieldKeys) {
|
|
||||||
this.fieldKeys = fieldKeys;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,103 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataLoader;
|
|
||||||
import gr.ekt.bte.core.DataLoadingSpec;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.RecordSet;
|
|
||||||
import gr.ekt.bte.exceptions.MalformedSourceException;
|
|
||||||
import org.apache.logging.log4j.LogManager;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.submit.util.ItemSubmissionLookupDTO;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class SubmissionItemDataLoader implements DataLoader {
|
|
||||||
protected List<ItemSubmissionLookupDTO> dtoList;
|
|
||||||
|
|
||||||
List<DataLoader> providers;
|
|
||||||
|
|
||||||
private static Logger log = LogManager.getLogger(SubmissionItemDataLoader.class);
|
|
||||||
|
|
||||||
public SubmissionItemDataLoader() {
|
|
||||||
dtoList = null;
|
|
||||||
providers = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords() throws MalformedSourceException {
|
|
||||||
if (dtoList == null) {
|
|
||||||
throw new MalformedSourceException("dtoList not initialized");
|
|
||||||
}
|
|
||||||
RecordSet ret = new RecordSet();
|
|
||||||
|
|
||||||
for (ItemSubmissionLookupDTO dto : dtoList) {
|
|
||||||
Record rec = dto.getTotalPublication(providers);
|
|
||||||
ret.addRecord(rec);
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info("BTE DataLoader finished. Items loaded: "
|
|
||||||
+ ret.getRecords().size());
|
|
||||||
|
|
||||||
// Printing debug message
|
|
||||||
String totalString = "";
|
|
||||||
for (Record record : ret.getRecords()) {
|
|
||||||
totalString += SubmissionLookupUtils.getPrintableString(record)
|
|
||||||
+ "\n";
|
|
||||||
}
|
|
||||||
log.debug("Records loaded:\n" + totalString);
|
|
||||||
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public RecordSet getRecords(DataLoadingSpec spec)
|
|
||||||
throws MalformedSourceException {
|
|
||||||
if (spec.getOffset() > 0) {
|
|
||||||
return new RecordSet();
|
|
||||||
}
|
|
||||||
|
|
||||||
return getRecords();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return the dtoList
|
|
||||||
*/
|
|
||||||
public List<ItemSubmissionLookupDTO> getDtoList() {
|
|
||||||
return dtoList;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param dtoList the dtoList to set
|
|
||||||
*/
|
|
||||||
public void setDtoList(List<ItemSubmissionLookupDTO> dtoList) {
|
|
||||||
this.dtoList = dtoList;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return the providers
|
|
||||||
*/
|
|
||||||
public List<DataLoader> getProviders() {
|
|
||||||
return providers;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param providers the providers to set
|
|
||||||
*/
|
|
||||||
public void setProviders(List<DataLoader> providers) {
|
|
||||||
this.providers = providers;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,55 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Set;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataLoader;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import org.apache.http.HttpException;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public interface SubmissionLookupDataLoader extends DataLoader {
|
|
||||||
|
|
||||||
public final static String DOI = "doi";
|
|
||||||
|
|
||||||
public final static String PUBMED = "pubmed";
|
|
||||||
|
|
||||||
public final static String ARXIV = "arxiv";
|
|
||||||
|
|
||||||
public final static String REPEC = "repec";
|
|
||||||
|
|
||||||
public final static String SCOPUSEID = "scopuseid";
|
|
||||||
|
|
||||||
public final static String CINII = "cinii";
|
|
||||||
|
|
||||||
public final static String TYPE = "type";
|
|
||||||
|
|
||||||
List<String> getSupportedIdentifiers();
|
|
||||||
|
|
||||||
boolean isSearchProvider();
|
|
||||||
|
|
||||||
List<Record> search(Context context, String title, String author, int year)
|
|
||||||
throws HttpException, IOException;
|
|
||||||
|
|
||||||
List<Record> getByIdentifier(Context context, Map<String, Set<String>> keys)
|
|
||||||
throws HttpException, IOException;
|
|
||||||
|
|
||||||
List<Record> getByDOIs(Context context, Set<String> doiToSearch)
|
|
||||||
throws HttpException, IOException;
|
|
||||||
|
|
||||||
}
|
|
@@ -1,91 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataOutputSpec;
|
|
||||||
import gr.ekt.bte.core.OutputGenerator;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.RecordSet;
|
|
||||||
import gr.ekt.bte.core.Value;
|
|
||||||
import org.dspace.submit.util.ItemSubmissionLookupDTO;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class SubmissionLookupOutputGenerator implements OutputGenerator {
|
|
||||||
protected List<ItemSubmissionLookupDTO> dtoList;
|
|
||||||
|
|
||||||
protected final String DOI_FIELD = "doi";
|
|
||||||
|
|
||||||
protected final String NOT_FOUND_DOI = "NOT-FOUND-DOI";
|
|
||||||
|
|
||||||
public SubmissionLookupOutputGenerator() {
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<String> generateOutput(RecordSet records) {
|
|
||||||
dtoList = new ArrayList<ItemSubmissionLookupDTO>();
|
|
||||||
|
|
||||||
Map<String, List<Record>> record_sets = new HashMap<String, List<Record>>();
|
|
||||||
int counter = 0;
|
|
||||||
for (Record rec : records) {
|
|
||||||
String current_doi = NOT_FOUND_DOI;
|
|
||||||
List<Value> values = rec.getValues(DOI_FIELD);
|
|
||||||
if (values != null && values.size() > 0) {
|
|
||||||
current_doi = values.get(0).getAsString();
|
|
||||||
} else {
|
|
||||||
current_doi = NOT_FOUND_DOI + "_" + counter;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (record_sets.keySet().contains(current_doi)) {
|
|
||||||
record_sets.get(current_doi).add(rec);
|
|
||||||
} else {
|
|
||||||
ArrayList<Record> publication = new ArrayList<Record>();
|
|
||||||
publication.add(rec);
|
|
||||||
record_sets.put(current_doi, publication);
|
|
||||||
}
|
|
||||||
|
|
||||||
counter++;
|
|
||||||
}
|
|
||||||
for (Map.Entry<String, List<Record>> entry : record_sets.entrySet()) {
|
|
||||||
ItemSubmissionLookupDTO dto = new ItemSubmissionLookupDTO(
|
|
||||||
entry.getValue());
|
|
||||||
dtoList.add(dto);
|
|
||||||
}
|
|
||||||
|
|
||||||
return new ArrayList<String>();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public List<String> generateOutput(RecordSet records, DataOutputSpec spec) {
|
|
||||||
return generateOutput(records);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return the items
|
|
||||||
*/
|
|
||||||
public List<ItemSubmissionLookupDTO> getDtoList() {
|
|
||||||
return dtoList;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param items the items to set
|
|
||||||
*/
|
|
||||||
public void setDtoList(List<ItemSubmissionLookupDTO> items) {
|
|
||||||
this.dtoList = items;
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,194 +0,0 @@
|
|||||||
/**
|
|
||||||
* The contents of this file are subject to the license and copyright
|
|
||||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
* tree and available online at
|
|
||||||
*
|
|
||||||
* http://www.dspace.org/license/
|
|
||||||
*/
|
|
||||||
package org.dspace.submit.lookup;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import javax.servlet.http.HttpServletRequest;
|
|
||||||
|
|
||||||
import gr.ekt.bte.core.DataLoader;
|
|
||||||
import gr.ekt.bte.core.Record;
|
|
||||||
import gr.ekt.bte.core.TransformationEngine;
|
|
||||||
import gr.ekt.bte.dataloader.FileDataLoader;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.submit.util.SubmissionLookupDTO;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @author Andrea Bollini
|
|
||||||
* @author Kostas Stamatis
|
|
||||||
* @author Luigi Andrea Pascarelli
|
|
||||||
* @author Panagiotis Koutsourakis
|
|
||||||
*/
|
|
||||||
public class SubmissionLookupService {
|
|
||||||
public static final String CFG_MODULE = "submission-lookup";
|
|
||||||
|
|
||||||
public static final String SL_NAMESPACE_PREFIX = "http://www.dspace.org/sl/";
|
|
||||||
|
|
||||||
public static final String MANUAL_USER_INPUT = "manual";
|
|
||||||
|
|
||||||
public static final String PROVIDER_NAME_FIELD = "provider_name_field";
|
|
||||||
|
|
||||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionLookupService.class);
|
|
||||||
|
|
||||||
public static final String SEPARATOR_VALUE = "#######";
|
|
||||||
|
|
||||||
public static final String SEPARATOR_VALUE_REGEX = SEPARATOR_VALUE;
|
|
||||||
|
|
||||||
protected List<DataLoader> providers;
|
|
||||||
|
|
||||||
protected Map<String, List<String>> idents2provs;
|
|
||||||
|
|
||||||
protected List<String> searchProviders;
|
|
||||||
|
|
||||||
protected List<String> fileProviders;
|
|
||||||
|
|
||||||
protected TransformationEngine phase1TransformationEngine;
|
|
||||||
|
|
||||||
protected TransformationEngine phase2TransformationEngine;
|
|
||||||
|
|
||||||
protected List<String> detailFields = null;
|
|
||||||
|
|
||||||
public void setPhase2TransformationEngine(
|
|
||||||
TransformationEngine phase2TransformationEngine) {
|
|
||||||
this.phase2TransformationEngine = phase2TransformationEngine;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setPhase1TransformationEngine(
|
|
||||||
TransformationEngine phase1TransformationEngine) {
|
|
||||||
this.phase1TransformationEngine = phase1TransformationEngine;
|
|
||||||
|
|
||||||
        MultipleSubmissionLookupDataLoader dataLoader = (MultipleSubmissionLookupDataLoader) phase1TransformationEngine
            .getDataLoader();

        this.idents2provs = new HashMap<String, List<String>>();
        this.searchProviders = new ArrayList<String>();
        this.fileProviders = new ArrayList<String>();

        if (providers == null) {
            this.providers = new ArrayList<DataLoader>();

            for (String providerName : dataLoader.getProvidersMap().keySet()) {
                DataLoader p = dataLoader.getProvidersMap().get(providerName);

                this.providers.add(p);

                // Do not do that for file providers
                if (p instanceof FileDataLoader) {
                    this.fileProviders.add(providerName);
                } else if (p instanceof NetworkSubmissionLookupDataLoader) {

                    NetworkSubmissionLookupDataLoader p2 = (NetworkSubmissionLookupDataLoader) p;

                    p2.setProviderName(providerName);

                    if (p2.isSearchProvider()) {
                        searchProviders.add(providerName);
                    }
                    List<String> suppIdentifiers = p2.getSupportedIdentifiers();
                    if (suppIdentifiers != null) {
                        for (String ident : suppIdentifiers) {
                            List<String> tmp = idents2provs.get(ident);
                            if (tmp == null) {
                                tmp = new ArrayList<String>();
                                idents2provs.put(ident, tmp);
                            }
                            tmp.add(providerName);
                        }
                    }
                }
            }
        }
    }

    public TransformationEngine getPhase1TransformationEngine() {
        return phase1TransformationEngine;
    }

    public TransformationEngine getPhase2TransformationEngine() {
        return phase2TransformationEngine;
    }

    public List<String> getIdentifiers() {

        List<String> allSupportedIdentifiers = new ArrayList<String>();
        MultipleSubmissionLookupDataLoader dataLoader = (MultipleSubmissionLookupDataLoader) phase1TransformationEngine
            .getDataLoader();
        for (String providerName : dataLoader.getProvidersMap().keySet()) {
            DataLoader provider = dataLoader.getProvidersMap()
                .get(providerName);
            if (provider instanceof SubmissionLookupDataLoader) {
                for (String identifier : ((SubmissionLookupDataLoader) provider)
                    .getSupportedIdentifiers()) {
                    if (!allSupportedIdentifiers.contains(identifier)) {
                        allSupportedIdentifiers.add(identifier);
                    }
                }
            }
        }

        return allSupportedIdentifiers;
    }

    public Map<String, List<String>> getProvidersIdentifiersMap() {
        return idents2provs;
    }

    public SubmissionLookupDTO getSubmissionLookupDTO(
        HttpServletRequest request, String uuidSubmission) {
        SubmissionLookupDTO dto = (SubmissionLookupDTO) request.getSession()
            .getAttribute("submission_lookup_" + uuidSubmission);
        if (dto == null) {
            dto = new SubmissionLookupDTO();
            storeDTOs(request, uuidSubmission, dto);
        }
        return dto;
    }

    public void invalidateDTOs(HttpServletRequest request, String uuidSubmission) {
        request.getSession().removeAttribute(
            "submission_lookup_" + uuidSubmission);
    }

    public void storeDTOs(HttpServletRequest request, String uuidSubmission,
        SubmissionLookupDTO dto) {
        request.getSession().setAttribute(
            "submission_lookup_" + uuidSubmission, dto);
    }

    public List<String> getSearchProviders() {
        return searchProviders;
    }

    public List<DataLoader> getProviders() {
        return providers;
    }

    public static String getProviderName(Record rec) {
        return SubmissionLookupUtils.getFirstValue(rec,
            SubmissionLookupService.PROVIDER_NAME_FIELD);
    }

    public static String getType(Record rec) {
        return SubmissionLookupUtils.getFirstValue(rec,
            SubmissionLookupDataLoader.TYPE);
    }

    public List<String> getFileProviders() {
        return this.fileProviders;
    }

    public List<String> getDetailFields() {
        return detailFields;
    }

    public void setDetailFields(List<String> detailFields) {
        this.detailFields = detailFields;
    }
}
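Editor's note (not part of the diff): the methods above appear to belong to the removed SubmissionLookupService, which caches lookup results in the HTTP session under "submission_lookup_" + submission UUID. A minimal sketch of how calling code typically drove those session helpers follows; the wrapper class, method and variable names here are illustrative assumptions, not DSpace API.

import java.util.List;

import javax.servlet.http.HttpServletRequest;

import org.dspace.submit.lookup.SubmissionLookupService;
import org.dspace.submit.util.ItemSubmissionLookupDTO;
import org.dspace.submit.util.SubmissionLookupDTO;

public class SubmissionLookupSessionExample {

    // Remember the lookup hits for one in-progress submission.
    public void rememberResults(SubmissionLookupService service, HttpServletRequest request,
                                String submissionUuid, List<ItemSubmissionLookupDTO> hits) {
        // Fetches the DTO bound to this submission, creating and storing a new one if absent.
        SubmissionLookupDTO dto = service.getSubmissionLookupDTO(request, submissionUuid);
        dto.setItems(hits);
        // Re-store explicitly so the session attribute reflects the updated item list.
        service.storeDTOs(request, submissionUuid, dto);
    }

    // Drop the cached results once the submission is finished or abandoned.
    public void forgetResults(SubmissionLookupService service, HttpServletRequest request,
                              String submissionUuid) {
        service.invalidateDTOs(request, submissionUuid);
    }
}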
@@ -1,156 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.io.File;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.Value;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class SubmissionLookupUtils {
    private static final Logger log = LogManager.getLogger(SubmissionLookupUtils.class);

    /**
     * Default constructor
     */
    private SubmissionLookupUtils() { }

    private static final ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();

    /**
     * Location of config file
     */
    private static final String configFilePath = configurationService
        .getProperty("dspace.dir")
        + File.separator
        + "config"
        + File.separator + "crosswalks" + File.separator;

    // Pattern to extract the converter name, if any
    private static final Pattern converterPattern = Pattern.compile(".*\\((.*)\\)");

    protected static final MetadataSchemaService metadataSchemaService =
        ContentServiceFactory.getInstance().getMetadataSchemaService();
    protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();

    public static LookupProvidersCheck getProvidersCheck(Context context,
            Item item, String dcSchema, String dcElement,
            String dcQualifier) {
        try {
            LookupProvidersCheck check = new LookupProvidersCheck();
            List<MetadataSchema> schemas = metadataSchemaService.findAll(context);
            List<MetadataValue> values = itemService.getMetadata(item, dcSchema, dcElement,
                dcQualifier, Item.ANY);

            for (MetadataSchema schema : schemas) {
                boolean error = false;
                if (schema.getNamespace().startsWith(
                    SubmissionLookupService.SL_NAMESPACE_PREFIX)) {
                    List<MetadataValue> slCache = itemService.getMetadata(item, schema.getName(),
                        dcElement, dcQualifier, Item.ANY);
                    if (slCache.isEmpty()) {
                        continue;
                    }

                    if (slCache.size() != values.size()) {
                        error = true;
                    } else {
                        for (int idx = 0; idx < values.size(); idx++) {
                            MetadataValue v = values.get(idx);
                            MetadataValue sl = slCache.get(idx);
                            // FIXME handle authority and multiple possibilities:
                            // uncertain matches, affiliations, etc.
                            if (!v.getValue().equals(sl.getValue())) {
                                error = true;
                                break;
                            }
                        }
                    }
                    if (error) {
                        check.getProvidersErr().add(schema.getName());
                    } else {
                        check.getProvidersOk().add(schema.getName());
                    }
                }
            }
            return check;
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }

    }

    public static String normalizeDOI(String doi) {
        if (doi != null) {
            return doi.trim().replaceAll("^http://dx.doi.org/", "")
                .replaceAll("^doi:", "");
        }
        return null;
    }

    public static String getFirstValue(Record rec, String field) {
        List<Value> values = rec.getValues(field);
        String value = null;
        if (values != null && values.size() > 0) {
            value = values.get(0).getAsString();
        }
        return value;
    }

    public static List<String> getValues(Record rec, String field) {
        List<String> result = new ArrayList<>();
        List<Value> values = rec.getValues(field);
        if (values != null && values.size() > 0) {
            for (Value value : values) {
                result.add(value.getAsString());
            }
        }
        return result;
    }

    public static String getPrintableString(Record record) {
        StringBuilder result = new StringBuilder();

        result.append("\nPublication {\n");

        for (String field : record.getFields()) {
            result.append("--").append(field).append(":\n");
            List<Value> values = record.getValues(field);
            for (Value value : values) {
                result.append("\t").append(value.getAsString()).append("\n");
            }
        }

        result.append("}\n");

        return result.toString();
    }
}
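Editor's note (not part of the diff): a minimal sketch of how the static helpers in the removed SubmissionLookupUtils were typically used. The record is a SubmissionLookupPublication (the MutableRecord implementation removed later in this diff); the provider name "example" and the DOI string are made up for illustration.

import gr.ekt.bte.core.StringValue;

import org.dspace.submit.lookup.SubmissionLookupUtils;
import org.dspace.submit.util.SubmissionLookupPublication;

public class SubmissionLookupUtilsExample {
    public static void main(String[] args) {
        // Strips the resolver prefix and any leading "doi:" scheme.
        String doi = SubmissionLookupUtils.normalizeDOI("http://dx.doi.org/10.1000/182");
        System.out.println(doi); // -> 10.1000/182

        SubmissionLookupPublication record = new SubmissionLookupPublication("example");
        record.addValue("title", new StringValue("A sample title"));

        // Reads the first value of a field, or null when the field is absent.
        System.out.println(SubmissionLookupUtils.getFirstValue(record, "title"));

        // Dumps every field and value of the record in a human-readable form.
        System.out.println(SubmissionLookupUtils.getPrintableString(record));
    }
}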
@@ -1,97 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.submit.lookup;

import java.util.ArrayList;
import java.util.List;

import gr.ekt.bte.core.AbstractModifier;
import gr.ekt.bte.core.MutableRecord;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.StringValue;
import gr.ekt.bte.core.Value;
import org.apache.commons.lang3.StringUtils;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class ValueConcatenationModifier extends AbstractModifier {
    private String field;

    private String separator = ",";

    private boolean whitespaceAfter = true;

    public ValueConcatenationModifier() {
        super("ValueConcatenationModifier");
    }

    @Override
    public Record modify(MutableRecord rec) {
        List<Value> values = rec.getValues(field);
        if (values != null) {
            List<String> converted_values = new ArrayList<String>();
            for (Value val : values) {
                converted_values.add(val.getAsString());
            }
            List<Value> final_value = new ArrayList<Value>();
            String v = StringUtils.join(converted_values.iterator(), separator
                + (whitespaceAfter ? " " : ""));
            final_value.add(new StringValue(v));
            rec.updateField(field, final_value);
        }

        return rec;
    }

    /**
     * @return the field
     */
    public String getField() {
        return field;
    }

    /**
     * @param field the field to set
     */
    public void setField(String field) {
        this.field = field;
    }

    /**
     * @return the separator
     */
    public String getSeparator() {
        return separator;
    }

    /**
     * @param separator the separator to set
     */
    public void setSeparator(String separator) {
        this.separator = separator;
    }

    /**
     * @return the whiteSpaceAfter
     */
    public boolean isWhitespaceAfter() {
        return whitespaceAfter;
    }

    /**
     * @param whiteSpaceAfter the whiteSpaceAfter to set
     */
    public void setWhitespaceAfter(boolean whiteSpaceAfter) {
        this.whitespaceAfter = whiteSpaceAfter;
    }
}
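Editor's note (not part of the diff): a minimal sketch of configuring the removed ValueConcatenationModifier, which collapses all values of one field into a single joined value. The field name "keywords" and the sample values are illustrative assumptions.

import gr.ekt.bte.core.StringValue;

import org.dspace.submit.lookup.ValueConcatenationModifier;
import org.dspace.submit.util.SubmissionLookupPublication;

public class ValueConcatenationExample {
    public static void main(String[] args) {
        SubmissionLookupPublication record = new SubmissionLookupPublication("example");
        record.addValue("keywords", new StringValue("open access"));
        record.addValue("keywords", new StringValue("repositories"));

        ValueConcatenationModifier modifier = new ValueConcatenationModifier();
        modifier.setField("keywords");
        modifier.setSeparator(";");          // default is ","
        modifier.setWhitespaceAfter(true);   // keep a space after each separator

        // After this call the record holds a single value: "open access; repositories"
        modifier.modify(record);
    }
}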
@@ -1,34 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.Logger;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;


public class AccessStep extends AbstractProcessingStep {
    /**
     * log4j logger
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AccessStep.class);

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

}
@@ -1,38 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.Logger;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.license.factory.LicenseServiceFactory;
import org.dspace.license.service.CreativeCommonsService;
import org.dspace.submit.AbstractProcessingStep;

public class CCLicenseStep extends AbstractProcessingStep {
    /**
     * log4j logger
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CCLicenseStep.class);

    protected final CreativeCommonsService creativeCommonsService = LicenseServiceFactory.getInstance()
        .getCreativeCommonsService();

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

}
@@ -1,32 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.Logger;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class CompleteStep extends AbstractProcessingStep {
    /**
     * log4j logger
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CompleteStep.class);

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }
}
@@ -1,22 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
 */
public class DescribeStep extends MetadataStep {
    /**
     * log4j logger
     */
    private static final Logger log = LogManager.getLogger();

}
@@ -1,22 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
 */
public class ExtractionStep extends MetadataStep {
    /**
     * log4j logger
     */
    private static final Logger log = LogManager.getLogger();

}
@@ -1,27 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class InitialQuestionsStep extends AbstractProcessingStep {

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }
}
@@ -1,33 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.Logger;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class LicenseStep extends AbstractProcessingStep {

    /**
     * log4j logger
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(LicenseStep.class);

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }
}
@@ -1,198 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.Value;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.submit.AbstractProcessingStep;
import org.dspace.submit.listener.MetadataListener;
import org.dspace.submit.lookup.SubmissionLookupDataLoader;

//FIXME move to the ExtractionStep
/**
 * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
 */
public class MetadataStep extends AbstractProcessingStep {
    /**
     * log4j logger
     */
    private static final Logger log = LogManager.getLogger();

    protected List<MetadataListener> listeners = DSpaceServicesFactory.getInstance().getServiceManager()
        .getServicesByType(MetadataListener.class);

    protected Map<String, List<MetadataValue>> metadataMap = new HashMap<>();
    private final Map<String, Set<String>> results = new HashMap<>();
    private final Map<String, String> mappingIdentifier = new HashMap<>();

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        for (MetadataListener listener : listeners) {
            for (String metadata : listener.getMetadata().keySet()) {
                String[] tokenized = Utils.tokenize(metadata);
                List<MetadataValue> mm = itemService.getMetadata(wsi.getItem(), tokenized[0], tokenized[1],
                    tokenized[2], Item.ANY);
                if (mm != null && !mm.isEmpty()) {
                    metadataMap.put(metadata, mm);
                } else {
                    metadataMap.put(metadata, new ArrayList<>());
                }
                mappingIdentifier.put(metadata, listener.getMetadata().get(metadata));
            }
        }
    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        external:
        for (String metadata : metadataMap.keySet()) {
            String[] tokenized = Utils.tokenize(metadata);
            List<MetadataValue> currents = itemService.getMetadata(wsi.getItem(), tokenized[0], tokenized[1],
                tokenized[2], Item.ANY);
            if (currents != null && !currents.isEmpty()) {
                List<MetadataValue> olds = metadataMap.get(metadata);
                if (olds.isEmpty()) {
                    process(context, metadata, currents);
                    continue external;
                }
                internal:
                for (MetadataValue current : currents) {

                    boolean found = false;
                    for (MetadataValue old : olds) {
                        if (old.getValue().equals(current.getValue())) {
                            found = true;
                        }
                    }
                    if (!found) {
                        process(context, metadata, current);
                    }
                }
            }
        }

        if (!results.isEmpty()) {
            for (MetadataListener listener : listeners) {
                for (DataLoader dataLoader : listener.getDataloadersMap().values()) {
                    SubmissionLookupDataLoader submissionLookupDataLoader = (SubmissionLookupDataLoader) dataLoader;
                    try {
                        List<Record> recordSet = submissionLookupDataLoader.getByIdentifier(context, results);
                        List<Record> resultSet = convertFields(recordSet, bteBatchImportService.getOutputMap());
                        enrichItem(context, resultSet, wsi.getItem());
                    } catch (HttpException | IOException | SQLException | AuthorizeException e) {
                        log.error(e.getMessage(), e);
                    }
                }
            }
        }
    }

    protected void enrichItem(Context context, List<Record> rset, Item item) throws SQLException, AuthorizeException {
        for (Record record : rset) {
            for (String field : record.getFields()) {
                try {
                    String[] tfield = Utils.tokenize(field);
                    List<MetadataValue> mdvs = itemService
                        .getMetadata(item, tfield[0], tfield[1], tfield[2], Item.ANY);
                    if (mdvs == null || mdvs.isEmpty()) {
                        for (Value value : record.getValues(field)) {

                            itemService.addMetadata(context, item, tfield[0], tfield[1], tfield[2], null,
                                value.getAsString());
                        }
                    } else {
                        external:
                        for (Value value : record.getValues(field)) {
                            boolean found = false;
                            for (MetadataValue mdv : mdvs) {
                                if (mdv.getValue().equals(value.getAsString())) {
                                    found = true;
                                    continue external;
                                }
                            }
                            if (!found) {
                                itemService.addMetadata(context, item, tfield[0], tfield[1], tfield[2], null,
                                    value.getAsString());
                            }
                        }
                    }
                } catch (SQLException e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
        itemService.update(context, item);

    }

    private void process(Context context, String metadata, List<MetadataValue> currents) {
        for (MetadataValue current : currents) {
            process(context, metadata, current);
        }
    }

    private void process(Context context, String metadata, MetadataValue current) {
        String key = mappingIdentifier.get(metadata);
        Set<String> identifiers = null;
        if (!results.containsKey(key)) {
            identifiers = new HashSet<>();
        } else {
            identifiers = results.get(key);
        }
        identifiers.add(current.getValue());
        results.put(key, identifiers);
    }

    public List<Record> convertFields(List<Record> recordSet, Map<String, String> fieldMap) {
        List<Record> result = new ArrayList<>();
        for (Record publication : recordSet) {
            for (String fieldName : fieldMap.keySet()) {
                String md = null;
                if (fieldMap != null) {
                    md = fieldMap.get(fieldName);
                }

                if (StringUtils.isBlank(md)) {
                    continue;
                } else {
                    md = md.trim();
                }

                if (publication.isMutable()) {
                    List<Value> values = publication.getValues(md);
                    publication.makeMutable().removeField(md);
                    publication.makeMutable().addField(fieldName, values);
                }
            }

            result.add(publication);
        }
        return result;
    }
}
@@ -1,28 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class SampleStep extends AbstractProcessingStep {

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

}
@@ -1,30 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.Logger;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class SelectCollectionStep extends AbstractProcessingStep {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SelectCollectionStep.class);

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }
}
@@ -1,33 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class StartSubmissionLookupStep extends AbstractProcessingStep {
    /**
     * log4j logger
     */
    private static Logger log = LogManager.getLogger(StartSubmissionLookupStep.class);

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }
}
@@ -1,33 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.Logger;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class UploadStep extends AbstractProcessingStep {
    /**
     * log4j logger
     */
    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(UploadStep.class);

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

}
@@ -1,18 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.apache.logging.log4j.Logger;

public class UploadWithEmbargoStep extends UploadStep {
    /**
     * log4j logger
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(UploadWithEmbargoStep.class);

}
@@ -1,27 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.step;

import org.dspace.content.InProgressSubmission;
import org.dspace.core.Context;
import org.dspace.submit.AbstractProcessingStep;

public class VerifyStep extends AbstractProcessingStep {

    @Override
    public void doPreProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }

    @Override
    public void doPostProcessing(Context context, InProgressSubmission wsi) {
        // TODO Auto-generated method stub

    }
}
@@ -1,91 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.util;

import java.io.Serializable;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.MutableRecord;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.Value;
import org.dspace.submit.lookup.SubmissionLookupService;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class ItemSubmissionLookupDTO implements Serializable {
    private static final long serialVersionUID = 1;

    private static final String MERGED_PUBLICATION_PROVIDER = "merged";

    private static final String UNKNOWN_PROVIDER_STRING = "UNKNOWN-PROVIDER";

    private List<Record> publications;

    private String uuid;

    public ItemSubmissionLookupDTO(List<Record> publications) {
        this.uuid = UUID.randomUUID().toString();
        this.publications = publications;
    }

    public List<Record> getPublications() {
        return publications;
    }

    public Set<String> getProviders() {
        Set<String> orderedProviders = new LinkedHashSet<String>();
        for (Record p : publications) {
            orderedProviders.add(SubmissionLookupService.getProviderName(p));
        }
        return orderedProviders;
    }

    public String getUUID() {
        return uuid;
    }

    public Record getTotalPublication(List<DataLoader> providers) {
        if (publications == null) {
            return null;
        } else if (publications.size() == 1) {
            return publications.get(0);
        } else {
            MutableRecord pub = new SubmissionLookupPublication(
                MERGED_PUBLICATION_PROVIDER);
            // for (SubmissionLookupProvider prov : providers)
            // {
            for (Record p : publications) {
                // if
                // (!SubmissionLookupService.getProviderName(p).equals(prov.getShortName()))
                // {
                // continue;
                // }
                for (String field : p.getFields()) {
                    List<Value> values = p.getValues(field);
                    if (values != null && values.size() > 0) {
                        if (!pub.getFields().contains(field)) {
                            for (Value v : values) {
                                pub.addValue(field, v);
                            }
                        }
                    }
                }
            }
            // }
            return pub;
        }
    }
}
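Editor's note (not part of the diff): a minimal sketch of merging several provider records for the same publication with the removed ItemSubmissionLookupDTO. The provider names and field values are made up; the providers argument to getTotalPublication() is unused by the implementation above when more than one record is present, so null is passed here.

import java.util.ArrayList;
import java.util.List;

import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.StringValue;

import org.dspace.submit.util.ItemSubmissionLookupDTO;
import org.dspace.submit.util.SubmissionLookupPublication;

public class MergedPublicationExample {
    public static void main(String[] args) {
        SubmissionLookupPublication fromCrossref = new SubmissionLookupPublication("crossref");
        fromCrossref.addValue("title", new StringValue("A sample title"));

        SubmissionLookupPublication fromPubmed = new SubmissionLookupPublication("pubmed");
        fromPubmed.addValue("abstract", new StringValue("A sample abstract"));

        List<Record> records = new ArrayList<>();
        records.add(fromCrossref);
        records.add(fromPubmed);

        ItemSubmissionLookupDTO dto = new ItemSubmissionLookupDTO(records);
        // Flattens both records into one "merged" record, keeping the first
        // record's values for any field that appears more than once.
        Record merged = dto.getTotalPublication(null);
        System.out.println(merged.getFields()); // -> [title, abstract]
    }
}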
@@ -1,45 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.util;

import java.io.Serializable;
import java.util.List;
import java.util.UUID;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class SubmissionLookupDTO implements Serializable {
    private static final long serialVersionUID = 1;

    private String uuid;

    private List<ItemSubmissionLookupDTO> items;

    public SubmissionLookupDTO() {
        this.uuid = UUID.randomUUID().toString();
    }

    public void setItems(List<ItemSubmissionLookupDTO> items) {
        this.items = items;
    }

    public ItemSubmissionLookupDTO getLookupItem(String uuidLookup) {
        if (items != null) {
            for (ItemSubmissionLookupDTO item : items) {
                if (item.getUUID().equals(uuidLookup)) {
                    return item;
                }
            }
        }
        return null;
    }
}
@@ -1,189 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.util;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import gr.ekt.bte.core.MutableRecord;
import gr.ekt.bte.core.StringValue;
import gr.ekt.bte.core.Value;
import org.apache.commons.lang3.StringUtils;
import org.dspace.submit.lookup.SubmissionLookupDataLoader;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class SubmissionLookupPublication implements MutableRecord, Serializable {
    private String providerName;

    private Map<String, List<String>> storage = new HashMap<String, List<String>>();

    public SubmissionLookupPublication(String providerName) {
        this.providerName = providerName;
    }

    // needed to serialize it with JSON
    public Map<String, List<String>> getStorage() {
        return storage;
    }

    @Override
    public Set<String> getFields() {
        return storage.keySet();
    }

    public List<String> remove(String md) {
        return storage.remove(md);
    }

    public void add(String md, String nValue) {
        if (StringUtils.isNotBlank(nValue)) {
            List<String> tmp = storage.get(md);
            if (tmp == null) {
                tmp = new ArrayList<String>();
                storage.put(md, tmp);
            }
            tmp.add(nValue);
        }
    }

    public String getFirstValue(String md) {
        List<String> tmp = storage.get(md);
        if (tmp == null || tmp.size() == 0) {
            return null;
        }
        return tmp.get(0);
    }

    public String getProviderName() {
        return providerName;
    }

    public String getType() {
        return getFirstValue(SubmissionLookupDataLoader.TYPE);
    }

    // BTE Record interface methods
    @Override
    public boolean hasField(String md) {
        return storage.containsKey(md);
    }

    @Override
    public List<Value> getValues(String md) {
        List<String> stringValues = storage.get(md);
        if (stringValues == null) {
            return null;
        }
        List<Value> values = new ArrayList<Value>();
        for (String value : stringValues) {
            values.add(new StringValue(value));
        }
        return values;
    }

    @Override
    public boolean isMutable() {
        return true;
    }

    @Override
    public MutableRecord makeMutable() {
        return this;
    }

    @Override
    public boolean addField(String md, List<Value> values) {
        if (storage.containsKey(md)) {
            List<String> stringValues = storage.get(md);
            if (values != null) {
                for (Value value : values) {
                    stringValues.add(value.getAsString());
                }
            }
        } else {
            List<String> tmp = new ArrayList<String>();
            if (values != null) {
                for (Value value : values) {
                    tmp.add(value.getAsString());
                }
            }
            storage.put(md, tmp);
        }

        return true;
    }

    @Override
    public boolean addValue(String md, Value value) {
        if (storage.containsKey(md)) {
            List<String> stringValues = storage.get(md);
            stringValues.add(value.getAsString());
        } else {
            List<String> tmp = new ArrayList<String>();
            tmp.add(value.getAsString());

            storage.put(md, tmp);
        }

        return true;
    }

    @Override
    public boolean removeField(String md) {
        if (storage.containsKey(md)) {
            storage.remove(md);
        }
        return false;
    }

    @Override
    public boolean removeValue(String md, Value value) {
        if (storage.containsKey(md)) {
            List<String> stringValues = storage.get(md);
            stringValues.remove(value.getAsString());
        }
        return true;
    }

    @Override
    public boolean updateField(String md, List<Value> values) {
        List<String> stringValues = new ArrayList<String>();
        for (Value value : values) {
            stringValues.add(value.getAsString());
        }
        storage.put(md, stringValues);

        return true;
    }

    @Override
    public boolean updateValue(String md, Value valueOld, Value valueNew) {
        if (storage.containsKey(md)) {
            List<String> stringValues = storage.get(md);
            List<String> newStringValues = storage.get(md);
            for (String s : stringValues) {
                if (s.equals(valueOld.getAsString())) {
                    newStringValues.add(valueNew.getAsString());
                } else {
                    newStringValues.add(s);
                }
            }
            storage.put(md, newStringValues);
        }
        return true;
    }
}
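Editor's note (not part of the diff): a minimal sketch of using the removed SubmissionLookupPublication directly, the serializable MutableRecord implementation that carried lookup results between providers and the submission steps. The provider name "arxiv" and the field values are made up for illustration.

import java.util.List;

import gr.ekt.bte.core.StringValue;
import gr.ekt.bte.core.Value;

import org.dspace.submit.util.SubmissionLookupPublication;

public class SubmissionLookupPublicationExample {
    public static void main(String[] args) {
        SubmissionLookupPublication pub = new SubmissionLookupPublication("arxiv");

        // add() silently ignores blank values; addValue() always appends.
        pub.add("title", "A sample title");
        pub.addValue("author", new StringValue("Doe, Jane"));
        pub.addValue("author", new StringValue("Roe, Richard"));

        System.out.println(pub.getProviderName());        // arxiv
        System.out.println(pub.getFirstValue("title"));   // A sample title

        List<Value> authors = pub.getValues("author");
        System.out.println(authors.size());               // 2

        pub.removeField("author");
        System.out.println(pub.hasField("author"));       // false
    }
}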
Some files were not shown because too many files have changed in this diff.