mirror of
https://github.com/DSpace/DSpace.git
synced 2025-10-07 01:54:22 +00:00
Merge remote-tracking branch 'dspace/main' into w2p-72215_metadataimport-email-parameter
This commit is contained in:
@@ -0,0 +1,107 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.bibtex.service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import javax.annotation.Resource;
|
||||
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.service.components.AbstractPlainMetadataSource;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
|
||||
import org.jbibtex.BibTeXDatabase;
|
||||
import org.jbibtex.BibTeXEntry;
|
||||
import org.jbibtex.BibTeXParser;
|
||||
import org.jbibtex.Key;
|
||||
import org.jbibtex.ParseException;
|
||||
import org.jbibtex.Value;
|
||||
|
||||
/**
|
||||
* Implements a metadata importer for BibTeX files
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public class BibtexImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource {
|
||||
|
||||
|
||||
/**
|
||||
* The string that identifies this import implementation as
|
||||
* MetadataSource implementation
|
||||
*
|
||||
* @return the identifying uri
|
||||
*/
|
||||
@Override
|
||||
public String getImportSource() {
|
||||
return "BibTeXMetadataSource";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PlainMetadataSourceDto> readData (InputStream
|
||||
inputStream) throws FileSourceException {
|
||||
List<PlainMetadataSourceDto> list = new ArrayList<>();
|
||||
BibTeXDatabase database;
|
||||
try {
|
||||
database = parseBibTex(inputStream);
|
||||
} catch (IOException | ParseException e) {
|
||||
throw new FileSourceException("Unable to parse file with BibTeX parser");
|
||||
}
|
||||
if (database == null || database.getEntries() == null) {
|
||||
throw new FileSourceException("File results in an empty list of metadata");
|
||||
}
|
||||
if (database.getEntries() != null) {
|
||||
for (Entry<Key, BibTeXEntry> entry : database.getEntries().entrySet()) {
|
||||
PlainMetadataSourceDto item = new PlainMetadataSourceDto();
|
||||
List<PlainMetadataKeyValueItem> keyValues = new ArrayList<>();
|
||||
item.setMetadata(keyValues);
|
||||
PlainMetadataKeyValueItem keyValueItem = new PlainMetadataKeyValueItem();
|
||||
keyValueItem.setKey(entry.getValue().getType().getValue());
|
||||
keyValueItem.setValue(entry.getKey().getValue());
|
||||
keyValues.add(keyValueItem);
|
||||
if (entry.getValue().getFields() != null) {
|
||||
for (Entry<Key,Value> subentry : entry.getValue().getFields().entrySet()) {
|
||||
PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem();
|
||||
innerItem.setKey(subentry.getKey().getValue());
|
||||
innerItem.setValue(subentry.getValue().toUserString());
|
||||
keyValues.add(innerItem);
|
||||
}
|
||||
}
|
||||
list.add(item);
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private BibTeXDatabase parseBibTex(InputStream inputStream) throws IOException, ParseException {
|
||||
Reader reader = new InputStreamReader(inputStream);
|
||||
BibTeXParser bibtexParser = new BibTeXParser();
|
||||
return bibtexParser.parse(reader);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieve the MetadataFieldMapping containing the mapping between RecordType
|
||||
* (in this case PlainMetadataSourceDto.class) and Metadata
|
||||
*
|
||||
* @return The configured MetadataFieldMapping
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
@Resource(name = "bibtexMetadataFieldMap")
|
||||
public void setMetadataFieldMap(@SuppressWarnings("rawtypes") Map metadataFieldMap) {
|
||||
super.setMetadataFieldMap(metadataFieldMap);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.exception;
|
||||
|
||||
/**
|
||||
* This exception could be throws when more than one element is found
|
||||
* in a method that works on one only.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
/**
 * Thrown when an operation that expects exactly one element
 * finds more than one in the given source.
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 */
public class FileMultipleOccurencesException extends Exception {

    private static final long serialVersionUID = 1222409723339501937L;

    /**
     * Create the exception with a detail message only.
     *
     * @param message the detail message
     */
    public FileMultipleOccurencesException(String message) {
        super(message);
    }

    /**
     * Create the exception with a detail message and an underlying cause.
     *
     * @param message the detail message
     * @param cause the underlying cause
     */
    public FileMultipleOccurencesException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
28
dspace-api/src/main/java/org/dspace/importer/external/exception/FileSourceException.java
vendored
Normal file
28
dspace-api/src/main/java/org/dspace/importer/external/exception/FileSourceException.java
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.exception;
|
||||
|
||||
/**
|
||||
* Represents a problem with the File content: e.g. null input stream, invalid content, ...
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
/**
 * Signals a problem with a file's content: e.g. null input stream,
 * invalid or unparsable content, ...
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 */
public class FileSourceException extends Exception {

    private static final long serialVersionUID = 6895579588455260182L;

    /**
     * Create the exception with a detail message only.
     *
     * @param message the detail message
     */
    public FileSourceException(String message) {
        super(message);
    }

    /**
     * Create the exception with a detail message and an underlying cause.
     *
     * @param message the detail message
     * @param cause the underlying cause
     */
    public FileSourceException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
@@ -117,16 +117,13 @@ public abstract class AbstractMetadataFieldMapping<RecordType>
|
||||
public Collection<MetadatumDTO> resultToDCValueMapping(RecordType record) {
|
||||
List<MetadatumDTO> values = new LinkedList<MetadatumDTO>();
|
||||
|
||||
|
||||
for (MetadataContributor<RecordType> query : getMetadataFieldMap().values()) {
|
||||
try {
|
||||
values.addAll(query.contributeMetadata(record));
|
||||
} catch (Exception e) {
|
||||
log.error("Error", e);
|
||||
}
|
||||
|
||||
}
|
||||
return values;
|
||||
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,94 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.metadatamapping.contributor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
|
||||
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
|
||||
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
|
||||
|
||||
/**
|
||||
* Metadata contributor that takes an PlainMetadataSourceDto instance and turns it into a
|
||||
* collection of metadatum
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public class SimpleMetadataContributor implements MetadataContributor<PlainMetadataSourceDto> {
|
||||
|
||||
private MetadataFieldConfig field;
|
||||
|
||||
private String key;
|
||||
|
||||
private MetadataFieldMapping<PlainMetadataSourceDto,
|
||||
MetadataContributor<PlainMetadataSourceDto>> metadataFieldMapping;
|
||||
|
||||
public SimpleMetadataContributor(MetadataFieldConfig field, String key) {
|
||||
this.field = field;
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
public SimpleMetadataContributor() { }
|
||||
|
||||
/**
|
||||
* Set the metadataFieldMapping of this SimpleMetadataContributor
|
||||
*
|
||||
* @param metadataFieldMapping the new mapping.
|
||||
*/
|
||||
@Override
|
||||
public void setMetadataFieldMapping(
|
||||
MetadataFieldMapping<PlainMetadataSourceDto,
|
||||
MetadataContributor<PlainMetadataSourceDto>> metadataFieldMapping) {
|
||||
this.metadataFieldMapping = metadataFieldMapping;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the metadata associated with the given object.
|
||||
* It match the key found in PlainMetadataSourceDto instance with the key passed to constructor.
|
||||
* In case of success, new metadatum is constructer (using field elements and PlainMetadataSourceDto value)
|
||||
* and added to the list.
|
||||
*
|
||||
* @param t A class to retrieve metadata and key to match from. t and contained list "metadata" MUST be not null.
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
*/
|
||||
@Override
|
||||
public Collection<MetadatumDTO> contributeMetadata(PlainMetadataSourceDto t) {
|
||||
List<MetadatumDTO> values = new LinkedList<>();
|
||||
for (PlainMetadataKeyValueItem metadatum : t.getMetadata()) {
|
||||
if (key.equals(metadatum.getKey())) {
|
||||
MetadatumDTO dcValue = new MetadatumDTO();
|
||||
dcValue.setValue(metadatum.getValue());
|
||||
dcValue.setElement(field.getElement());
|
||||
dcValue.setQualifier(field.getQualifier());
|
||||
dcValue.setSchema(field.getSchema());
|
||||
values.add(dcValue);
|
||||
}
|
||||
}
|
||||
return values;
|
||||
}
|
||||
|
||||
/*
|
||||
* Setter to inject field item
|
||||
*/
|
||||
public void setField(MetadataFieldConfig field) {
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/*
|
||||
* Setter to inject key value
|
||||
*/
|
||||
public void setKey(String key) {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
}
|
@@ -8,6 +8,10 @@
|
||||
|
||||
package org.dspace.importer.external.pubmed.service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.io.StringReader;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedList;
|
||||
@@ -20,6 +24,7 @@ import javax.ws.rs.client.WebTarget;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import com.google.common.io.CharStreams;
|
||||
import org.apache.axiom.om.OMElement;
|
||||
import org.apache.axiom.om.OMXMLBuilderFactory;
|
||||
import org.apache.axiom.om.OMXMLParserWrapper;
|
||||
@@ -27,8 +32,12 @@ import org.apache.axiom.om.xpath.AXIOMXPath;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
|
||||
import org.dspace.importer.external.service.components.FileSource;
|
||||
import org.dspace.importer.external.service.components.QuerySource;
|
||||
import org.jaxen.JaxenException;
|
||||
|
||||
/**
|
||||
@@ -36,11 +45,29 @@ import org.jaxen.JaxenException;
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
*/
|
||||
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement> {
|
||||
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement>
|
||||
implements QuerySource, FileSource {
|
||||
|
||||
private String baseAddress;
|
||||
|
||||
private WebTarget pubmedWebTarget;
|
||||
|
||||
private List<String> supportedExtensions;
|
||||
|
||||
/**
 * Set the file extensions supported by this metadata service
 *
 * @param supportedExtensions the file extensions (xml,txt,...) supported by this service
 */
public void setSupportedExtensions(List<String> supportedExtensions) {
    this.supportedExtensions = supportedExtensions;
}
|
||||
|
||||
/**
 * Get the file extensions (xml, txt, ...) supported by this metadata service.
 *
 * @return the supported file extensions
 */
@Override
public List<String> getSupportedExtensions() {
    return supportedExtensions;
}
|
||||
|
||||
/**
|
||||
* Find the number of records matching a query;
|
||||
*
|
||||
@@ -49,7 +76,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public int getNbRecords(String query) throws MetadataSourceException {
|
||||
public int getRecordsCount(String query) throws MetadataSourceException {
|
||||
return retry(new GetNbRecords(query));
|
||||
}
|
||||
|
||||
@@ -61,7 +88,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public int getNbRecords(Query query) throws MetadataSourceException {
|
||||
public int getRecordsCount(Query query) throws MetadataSourceException {
|
||||
return retry(new GetNbRecords(query));
|
||||
}
|
||||
|
||||
@@ -357,7 +384,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
|
||||
@Override
|
||||
public Collection<ImportRecord> call() throws Exception {
|
||||
List<ImportRecord> records = new LinkedList<ImportRecord>();
|
||||
|
||||
WebTarget getRecordIdsTarget = pubmedWebTarget
|
||||
.queryParam("term", query.getParameterAsClass("term", String.class));
|
||||
@@ -382,13 +408,41 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE);
|
||||
response = invocationBuilder.get();
|
||||
|
||||
List<OMElement> omElements = splitToRecords(response.readEntity(String.class));
|
||||
|
||||
for (OMElement record : omElements) {
|
||||
records.add(transformSourceRecords(record));
|
||||
}
|
||||
|
||||
return records;
|
||||
String xml = response.readEntity(String.class);
|
||||
return parseXMLString(xml);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException {
|
||||
String xml = null;
|
||||
try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) {
|
||||
xml = CharStreams.toString(reader);
|
||||
return parseXMLString(xml);
|
||||
} catch (IOException e) {
|
||||
throw new FileSourceException ("Cannot read XML from InputStream", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ImportRecord getRecord(InputStream inputStream) throws FileSourceException, FileMultipleOccurencesException {
|
||||
List<ImportRecord> importRecord = getRecords(inputStream);
|
||||
if (importRecord == null || importRecord.isEmpty()) {
|
||||
throw new FileSourceException("Cannot find (valid) record in File");
|
||||
} else if (importRecord.size() > 1) {
|
||||
throw new FileMultipleOccurencesException("File contains more than one entry");
|
||||
} else {
|
||||
return importRecord.get(0);
|
||||
}
|
||||
}
|
||||
|
||||
private List<ImportRecord> parseXMLString(String xml) {
|
||||
List<ImportRecord> records = new LinkedList<ImportRecord>();
|
||||
List<OMElement> omElements = splitToRecords(xml);
|
||||
for (OMElement record : omElements) {
|
||||
records.add(transformSourceRecords(record));
|
||||
}
|
||||
return records;
|
||||
}
|
||||
}
|
||||
|
@@ -8,6 +8,10 @@
|
||||
|
||||
package org.dspace.importer.external.service;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
@@ -19,11 +23,16 @@ import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
import org.dspace.importer.external.service.components.Destroyable;
|
||||
import org.dspace.importer.external.service.components.FileSource;
|
||||
import org.dspace.importer.external.service.components.MetadataSource;
|
||||
import org.dspace.importer.external.service.components.QuerySource;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
|
||||
/**
|
||||
* Main entry point for the import framework.
|
||||
* Instead of calling the different importer implementations, the ImportService should be called instead.
|
||||
@@ -32,8 +41,10 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
* importer implementation you want to use.
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo@4science.it)
|
||||
*/
|
||||
public class ImportService implements Destroyable {
|
||||
|
||||
private HashMap<String, MetadataSource> importSources = new HashMap<>();
|
||||
|
||||
Logger log = org.apache.logging.log4j.LogManager.getLogger(ImportService.class);
|
||||
@@ -101,11 +112,11 @@ public class ImportService implements Destroyable {
|
||||
public Collection<ImportRecord> findMatchingRecords(String uri, Item item) throws MetadataSourceException {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<ImportRecord>();
|
||||
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.findMatchingRecords(item));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).findMatchingRecords(item));
|
||||
}
|
||||
}
|
||||
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
throw new MetadataSourceException(e);
|
||||
@@ -125,9 +136,10 @@ public class ImportService implements Destroyable {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<ImportRecord>();
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.findMatchingRecords(query));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).findMatchingRecords(query));
|
||||
}
|
||||
}
|
||||
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
throw new MetadataSourceException(e);
|
||||
@@ -145,8 +157,10 @@ public class ImportService implements Destroyable {
|
||||
public int getNbRecords(String uri, String query) throws MetadataSourceException {
|
||||
try {
|
||||
int total = 0;
|
||||
for (MetadataSource MetadataSource : matchingImports(uri)) {
|
||||
total += MetadataSource.getNbRecords(query);
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
total += ((QuerySource)metadataSource).getRecordsCount(query);
|
||||
}
|
||||
}
|
||||
return total;
|
||||
} catch (Exception e) {
|
||||
@@ -165,8 +179,10 @@ public class ImportService implements Destroyable {
|
||||
public int getNbRecords(String uri, Query query) throws MetadataSourceException {
|
||||
try {
|
||||
int total = 0;
|
||||
for (MetadataSource MetadataSource : matchingImports(uri)) {
|
||||
total += MetadataSource.getNbRecords(query);
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
total += ((QuerySource)metadataSource).getRecordsCount(query);
|
||||
}
|
||||
}
|
||||
return total;
|
||||
} catch (Exception e) {
|
||||
@@ -189,7 +205,9 @@ public class ImportService implements Destroyable {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<>();
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.getRecords(query, start, count));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).getRecords(query, start, count));
|
||||
}
|
||||
}
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
@@ -209,7 +227,9 @@ public class ImportService implements Destroyable {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<>();
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.getRecords(query));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).getRecords(query));
|
||||
}
|
||||
}
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
@@ -229,10 +249,12 @@ public class ImportService implements Destroyable {
|
||||
public ImportRecord getRecord(String uri, String id) throws MetadataSourceException {
|
||||
try {
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource.getRecord(id) != null) {
|
||||
return metadataSource.getRecord(id);
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
QuerySource querySource = (QuerySource)metadataSource;
|
||||
if (querySource.getRecord(id) != null) {
|
||||
return querySource.getRecord(id);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
@@ -252,10 +274,12 @@ public class ImportService implements Destroyable {
|
||||
public ImportRecord getRecord(String uri, Query query) throws MetadataSourceException {
|
||||
try {
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource.getRecord(query) != null) {
|
||||
return metadataSource.getRecord(query);
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
QuerySource querySource = (QuerySource)metadataSource;
|
||||
if (querySource.getRecord(query) != null) {
|
||||
return querySource.getRecord(query);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
@@ -272,6 +296,41 @@ public class ImportService implements Destroyable {
|
||||
return importSources.keySet();
|
||||
}
|
||||
|
||||
/*
|
||||
* Get a collection of record from File,
|
||||
* The first match will be return.
|
||||
*
|
||||
* @param file The file from which will read records
|
||||
* @param originalName The original file name or full path
|
||||
* @return a single record contains the metadatum
|
||||
* @throws FileMultipleOccurencesException if more than one entry is found
|
||||
*/
|
||||
public ImportRecord getRecord(File file, String originalName)
|
||||
throws FileMultipleOccurencesException, FileSourceException {
|
||||
ImportRecord importRecords = null;
|
||||
for (MetadataSource metadataSource : importSources.values()) {
|
||||
try (InputStream fileInputStream = new FileInputStream(file)) {
|
||||
if (metadataSource instanceof FileSource) {
|
||||
FileSource fileSource = (FileSource)metadataSource;
|
||||
if (fileSource.isValidSourceForFile(originalName)) {
|
||||
importRecords = fileSource.getRecord(fileInputStream);
|
||||
break;
|
||||
}
|
||||
}
|
||||
//catch statements is required because we could have supported format (i.e. XML)
|
||||
//which fail on schema validation
|
||||
} catch (FileSourceException e) {
|
||||
log.debug(metadataSource.getImportSource() + " isn't a valid parser for file");
|
||||
} catch (FileMultipleOccurencesException e) {
|
||||
log.debug("File contains multiple metadata, return with error");
|
||||
throw e;
|
||||
} catch (IOException e1) {
|
||||
throw new FileSourceException("File cannot be read, may be null");
|
||||
}
|
||||
}
|
||||
return importRecords;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call destroy on all {@link Destroyable} {@link MetadataSource} objects set in this ImportService
|
||||
*/
|
||||
|
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
|
||||
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
|
||||
|
||||
|
||||
/**
|
||||
* This class is an abstract implementation of {@link MetadataSource} useful in cases
|
||||
* of plain metadata sources.
|
||||
* It provides the methot to mapping metadata to DSpace Format when source is a file
|
||||
* whit a list of <key, value> strings.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
public abstract class AbstractPlainMetadataSource
|
||||
extends AbstractMetadataFieldMapping<PlainMetadataSourceDto>
|
||||
implements FileSource {
|
||||
|
||||
protected abstract List<PlainMetadataSourceDto>
|
||||
readData(InputStream fileInpuStream) throws FileSourceException;
|
||||
|
||||
|
||||
private List<String> supportedExtensions;
|
||||
|
||||
/**
|
||||
* Set the file extensions supported by this metadata service
|
||||
*
|
||||
* @param supportedExtensionsthe file extensions (xml,txt,...) supported by this service
|
||||
*/
|
||||
public void setSupportedExtensions(List<String> supportedExtensions) {
|
||||
this.supportedExtensions = supportedExtensions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSupportedExtensions() {
|
||||
return supportedExtensions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a list of ImportRecord constructed from input file. This list is based on
|
||||
* the results retrieved from the file (InputStream) parsed through abstract method readData
|
||||
*
|
||||
* @param InputStream The inputStream of the file
|
||||
* @return A list of {@link ImportRecord}
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
*/
|
||||
@Override
|
||||
public List<ImportRecord> getRecords(InputStream is) throws FileSourceException {
|
||||
List<PlainMetadataSourceDto> datas = readData(is);
|
||||
List<ImportRecord> records = new ArrayList<>();
|
||||
for (PlainMetadataSourceDto item : datas) {
|
||||
records.add(toRecord(item));
|
||||
}
|
||||
return records;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an ImportRecord constructed from input file. This list is based on
|
||||
* the result retrieved from the file (InputStream) parsed through abstract method
|
||||
* "readData" implementation
|
||||
*
|
||||
* @param InputStream The inputStream of the file
|
||||
* @return An {@link ImportRecord} matching the file content
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
* @throws FileMultipleOccurencesException if the file contains more than one entry
|
||||
*/
|
||||
@Override
|
||||
public ImportRecord getRecord(InputStream is) throws FileSourceException, FileMultipleOccurencesException {
|
||||
List<PlainMetadataSourceDto> datas = readData(is);
|
||||
if (datas == null || datas.isEmpty()) {
|
||||
throw new FileSourceException("File is empty");
|
||||
}
|
||||
if (datas.size() > 1) {
|
||||
throw new FileMultipleOccurencesException("File "
|
||||
+ "contains more than one entry (" + datas.size() + " entries");
|
||||
}
|
||||
return toRecord(datas.get(0));
|
||||
}
|
||||
|
||||
|
||||
private ImportRecord toRecord(PlainMetadataSourceDto entry) {
|
||||
List<MetadatumDTO> metadata = new ArrayList<>();
|
||||
metadata.addAll(resultToDCValueMapping(entry));
|
||||
return new ImportRecord(metadata);
|
||||
}
|
||||
}
|
70
dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java
vendored
Normal file
70
dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
|
||||
/**
|
||||
* This interface declare the base methods to work with files containing metadata.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public interface FileSource extends MetadataSource {
|
||||
|
||||
/**
|
||||
* Return a list of ImportRecord constructed from input file.
|
||||
*
|
||||
* @param InputStream The inputStream of the file
|
||||
* @return A list of {@link ImportRecord}
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
*/
|
||||
public List<ImportRecord> getRecords(InputStream inputStream)
|
||||
throws FileSourceException;
|
||||
|
||||
/**
|
||||
* Return an ImportRecord constructed from input file.
|
||||
*
|
||||
* @param InputStream The inputStream of the file
|
||||
* @return An {@link ImportRecord} matching the file content
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
* @throws FileMultipleOccurencesException if the file contains more than one entry
|
||||
*/
|
||||
public ImportRecord getRecord(InputStream inputStream)
|
||||
throws FileSourceException, FileMultipleOccurencesException;
|
||||
|
||||
/**
|
||||
* This method is used to decide if the FileSource manage the file format
|
||||
*
|
||||
* @param originalName the file file original name
|
||||
* @return true if the FileSource can parse the file, false otherwise
|
||||
*/
|
||||
public default boolean isValidSourceForFile(String originalName) {
|
||||
List<String> extensions = getSupportedExtensions();
|
||||
if (extensions == null || extensions.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
if (originalName != null && originalName.contains(".")) {
|
||||
String extension = originalName.substring(originalName.lastIndexOf('.') + 1,
|
||||
originalName.length());
|
||||
return getSupportedExtensions().contains(extension);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the file extensions (xml, csv, txt, ...) supported by the FileSource implementation
|
||||
*/
|
||||
public List<String> getSupportedExtensions();
|
||||
|
||||
}
|
@@ -8,76 +8,14 @@
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Common interface for all import implementations.
|
||||
* Super interface for all import implementations.
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo@4science.it)
|
||||
*/
|
||||
public interface MetadataSource {
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query in string format
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getNbRecords(String query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query object
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getNbRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets a set of records matching a query. Supports pagination
|
||||
*
|
||||
* @param query the query. The query will generally be posted 'as is' to the source
|
||||
* @param start offset
|
||||
* @param count page size
|
||||
* @return a collection of fully transformed id's
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Find records based on a object query.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a set of records. Fully transformed.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param id identifier for the record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(String id) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param query a query matching a single record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* The string that identifies this import implementation. Preferable a URI
|
||||
@@ -86,23 +24,4 @@ public interface MetadataSource {
|
||||
*/
|
||||
public String getImportSource();
|
||||
|
||||
/**
|
||||
* Finds records based on an item
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param item an item to base the search on
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Finds records based on query object.
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException passed through.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException;
|
||||
}
|
||||
|
106
dspace-api/src/main/java/org/dspace/importer/external/service/components/QuerySource.java
vendored
Normal file
106
dspace-api/src/main/java/org/dspace/importer/external/service/components/QuerySource.java
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
|
||||
|
||||
/**
|
||||
* Common interface for database-based imports.
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo@4science.it)
|
||||
*/
|
||||
|
||||
public interface QuerySource extends MetadataSource {
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param id identifier for the record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(String id) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query in string format
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getRecordsCount(String query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query object
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getRecordsCount(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets a set of records matching a query. Supports pagination
|
||||
*
|
||||
* @param query the query. The query will generally be posted 'as is' to the source
|
||||
* @param start offset
|
||||
* @param count page size
|
||||
* @return a collection of fully transformed id's
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Find records based on a object query.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a set of records. Fully transformed.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param query a query matching a single record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Finds records based on query object.
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException passed through.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Finds records based on an item
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param item an item to base the search on
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException;
|
||||
|
||||
}
|
@@ -0,0 +1,50 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.importer.external.service.components.dto;
|
||||
|
||||
/**
|
||||
* Simple object to construct <key,value> items
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public class PlainMetadataKeyValueItem {
|
||||
|
||||
private String key;
|
||||
private String value;
|
||||
|
||||
/*
|
||||
* In a key-value items, like PlainMetadata, this method get the item's key
|
||||
*/
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
/*
|
||||
* In a key-value items, like PlainMetadata, this method set the item's key.
|
||||
* Never set or leave this field to null
|
||||
*
|
||||
*/
|
||||
public void setKey(String key) {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
/*
|
||||
* In key-value items, like PlainMetadata, this method get the item's value
|
||||
*/
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
/*
|
||||
* In key-value items, like PlainMetadata, this method set the item's value
|
||||
*/
|
||||
public void setValue(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,38 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.importer.external.service.components.dto;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
|
||||
/**
|
||||
* Simple object used to construct a list of <key,value> items.
|
||||
* This type is used in file plain metadata import as RecordType.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
public class PlainMetadataSourceDto {
|
||||
|
||||
private List<PlainMetadataKeyValueItem> metadata;
|
||||
|
||||
/*
|
||||
* Method used to get the Metadata list
|
||||
*/
|
||||
public List<PlainMetadataKeyValueItem> getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/*
|
||||
* Method used to set the metadata list
|
||||
*/
|
||||
public void setMetadata(List<PlainMetadataKeyValueItem> metadata) {
|
||||
this.metadata = metadata;
|
||||
}
|
||||
|
||||
}
|
@@ -21,7 +21,12 @@
|
||||
|
||||
<bean id="importService" class="org.dspace.importer.external.service.ImportService" scope="singleton"
|
||||
lazy-init="false" autowire="byType" destroy-method="destroy">
|
||||
|
||||
<property name="importSources">
|
||||
<list>
|
||||
<ref bean="PubmedImportService"></ref>
|
||||
<ref bean="BibtexImportService"></ref>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<!--If multiple importServices have been configured here but only one is to be used during the lookup step (StartSubmissionLookupStep),
|
||||
@@ -36,12 +41,28 @@
|
||||
class="org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl" scope="singleton">
|
||||
<property name="metadataFieldMapping" ref="PubmedMetadataFieldMapping"/>
|
||||
<property name="baseAddress" value="https://eutils.ncbi.nlm.nih.gov/entrez/eutils/"/>
|
||||
|
||||
<property name="supportedExtensions">
|
||||
<list>
|
||||
<value>xml</value>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
|
||||
<bean id="PubmedMetadataFieldMapping"
|
||||
class="org.dspace.importer.external.pubmed.metadatamapping.PubmedFieldMapping">
|
||||
</bean>
|
||||
|
||||
<bean id="BibtexImportService"
|
||||
class="org.dspace.importer.external.bibtex.service.BibtexImportMetadataSourceServiceImpl" scope="singleton">
|
||||
<property name="supportedExtensions">
|
||||
<list>
|
||||
<value>bib</value>
|
||||
<value>bibtex</value>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<!-- Metadatafield used to check against if it's already imported or not during the JSONLookupSearcher-->
|
||||
<bean id="lookupID" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.identifier.other"/>
|
||||
|
@@ -624,7 +624,7 @@ public class RestResourceController implements InitializingBean {
|
||||
HttpServletRequest request,
|
||||
@PathVariable String apiCategory,
|
||||
@PathVariable String model,
|
||||
@RequestParam("file") MultipartFile uploadfile)
|
||||
@RequestParam("file") List<MultipartFile> uploadfile)
|
||||
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
|
||||
|
||||
checkModelPluralForm(apiCategory, model);
|
||||
|
@@ -47,9 +47,9 @@ public class CollectionResourceWorkflowGroupHalLinkFactory
|
||||
Map<String, Role> roles = WorkflowUtils.getCollectionRoles(collection);
|
||||
UUID resourceUuid = UUID.fromString(halResource.getContent().getUuid());
|
||||
for (Map.Entry<String, Role> entry : roles.entrySet()) {
|
||||
list.add(buildLink("workflowGroups/" + entry.getKey(), getMethodOn()
|
||||
list.add(buildLink("workflowGroups", getMethodOn()
|
||||
.getWorkflowGroupForRole(resourceUuid, null, null,
|
||||
entry.getKey())));
|
||||
entry.getKey())).withName(entry.getKey()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -8,11 +8,13 @@
|
||||
package org.dspace.app.rest.model.hateoas;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonUnwrapped;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.springframework.hateoas.EntityModel;
|
||||
import org.springframework.hateoas.Link;
|
||||
|
||||
@@ -49,6 +51,15 @@ public abstract class HALResource<T> extends EntityModel<T> {
|
||||
public EntityModel<T> add(Link link) {
|
||||
if (!hasLink(link.getRel())) {
|
||||
return super.add(link);
|
||||
} else {
|
||||
String name = link.getName();
|
||||
if (StringUtils.isNotBlank(name)) {
|
||||
List<Link> list = this.getLinks(link.getRel());
|
||||
// If a link of this name doesn't already exist in the list, add it
|
||||
if (!list.stream().anyMatch((l -> StringUtils.equalsIgnoreCase(l.getName(), name)))) {
|
||||
super.add(link);
|
||||
}
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
@@ -463,7 +463,7 @@ public abstract class DSpaceRestRepository<T extends RestAddressableModel, ID ex
|
||||
* @throws IOException
|
||||
* @throws AuthorizeException
|
||||
*/
|
||||
public Iterable<T> upload(HttpServletRequest request, MultipartFile uploadfile)
|
||||
public Iterable<T> upload(HttpServletRequest request, List<MultipartFile> uploadfile)
|
||||
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
|
||||
Context context = obtainContext();
|
||||
Iterable<T> entity = upload(context, request, uploadfile);
|
||||
@@ -486,7 +486,7 @@ public abstract class DSpaceRestRepository<T extends RestAddressableModel, ID ex
|
||||
* @throws RepositoryMethodNotImplementedException
|
||||
*/
|
||||
protected Iterable<T> upload(Context context, HttpServletRequest request,
|
||||
MultipartFile uploadfile)
|
||||
List<MultipartFile> uploadfile)
|
||||
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
|
||||
throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", "");
|
||||
}
|
||||
|
@@ -16,10 +16,6 @@ import java.util.List;
|
||||
import java.util.UUID;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import gr.ekt.bte.core.TransformationEngine;
|
||||
import gr.ekt.bte.core.TransformationSpec;
|
||||
import gr.ekt.bte.exceptions.BadTransformationSpec;
|
||||
import gr.ekt.bte.exceptions.MalformedSourceException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.rest.Parameter;
|
||||
@@ -45,6 +41,7 @@ import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.content.service.BitstreamFormatService;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
@@ -56,14 +53,12 @@ import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.EPersonServiceImpl;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
|
||||
import org.dspace.importer.external.service.ImportService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.submit.AbstractProcessingStep;
|
||||
import org.dspace.submit.lookup.DSpaceWorkspaceItemOutputGenerator;
|
||||
import org.dspace.submit.lookup.MultipleSubmissionLookupDataLoader;
|
||||
import org.dspace.submit.lookup.SubmissionItemDataLoader;
|
||||
import org.dspace.submit.lookup.SubmissionLookupOutputGenerator;
|
||||
import org.dspace.submit.lookup.SubmissionLookupService;
|
||||
import org.dspace.submit.util.ItemSubmissionLookupDTO;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -73,10 +68,12 @@ import org.springframework.security.access.prepost.PreAuthorize;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
|
||||
/**
|
||||
* This is the repository responsible to manage WorkspaceItem Rest object
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science.it)
|
||||
*/
|
||||
@Component(WorkspaceItemRest.CATEGORY + "." + WorkspaceItemRest.NAME)
|
||||
public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceItemRest, Integer>
|
||||
@@ -110,15 +107,15 @@ public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceI
|
||||
@Autowired
|
||||
EPersonServiceImpl epersonService;
|
||||
|
||||
@Autowired
|
||||
SubmissionLookupService submissionLookupService;
|
||||
|
||||
@Autowired
|
||||
CollectionService collectionService;
|
||||
|
||||
@Autowired
|
||||
AuthorizeService authorizeService;
|
||||
|
||||
@Autowired
|
||||
ImportService importService;
|
||||
|
||||
@Autowired
|
||||
private UriListHandlerService uriListHandlerService;
|
||||
|
||||
@@ -360,147 +357,87 @@ public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceI
|
||||
|
||||
@Override
|
||||
public Iterable<WorkspaceItemRest> upload(Context context, HttpServletRequest request,
|
||||
MultipartFile uploadfile)
|
||||
List<MultipartFile> uploadfiles)
|
||||
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
|
||||
File file = Utils.getFile(uploadfile, "upload-loader", "filedataloader");
|
||||
List<WorkspaceItemRest> results = new ArrayList<>();
|
||||
|
||||
String uuid = request.getParameter("owningCollection");
|
||||
if (StringUtils.isBlank(uuid)) {
|
||||
uuid = configurationService.getProperty("submission.default.collection");
|
||||
}
|
||||
Collection collection = null;
|
||||
if (StringUtils.isNotBlank(uuid)) {
|
||||
collection = collectionService.find(context, UUID.fromString(uuid));
|
||||
} else {
|
||||
collection = collectionService.findAuthorizedOptimized(context, Constants.ADD).get(0);
|
||||
}
|
||||
|
||||
SubmissionConfig submissionConfig =
|
||||
submissionConfigReader.getSubmissionConfigByCollection(collection.getHandle());
|
||||
List<WorkspaceItem> result = null;
|
||||
List<ImportRecord> records = new ArrayList<>();
|
||||
try {
|
||||
String uuid = request.getParameter("collection");
|
||||
if (StringUtils.isBlank(uuid)) {
|
||||
uuid = configurationService.getProperty("submission.default.collection");
|
||||
}
|
||||
|
||||
Collection collection = null;
|
||||
if (StringUtils.isNotBlank(uuid)) {
|
||||
collection = collectionService.find(context, UUID.fromString(uuid));
|
||||
} else {
|
||||
collection = collectionService.findAuthorizedOptimized(context, Constants.ADD).get(0);
|
||||
}
|
||||
|
||||
SubmissionConfig submissionConfig =
|
||||
submissionConfigReader.getSubmissionConfigByCollection(collection.getHandle());
|
||||
|
||||
|
||||
List<ItemSubmissionLookupDTO> tmpResult = new ArrayList<ItemSubmissionLookupDTO>();
|
||||
|
||||
TransformationEngine transformationEngine1 = submissionLookupService.getPhase1TransformationEngine();
|
||||
TransformationSpec spec = new TransformationSpec();
|
||||
// FIXME this is mostly due to the need to test. The BTE framework has an assert statement that check if the
|
||||
// number of found record is less than the requested and treat 0 as is, instead, the implementation assume
|
||||
// 0=unlimited this lead to test failure.
|
||||
// It is unclear if BTE really respect values other than 0/MAX allowing us to put a protection against heavy
|
||||
// load
|
||||
spec.setNumberOfRecords(Integer.MAX_VALUE);
|
||||
if (transformationEngine1 != null) {
|
||||
MultipleSubmissionLookupDataLoader dataLoader =
|
||||
(MultipleSubmissionLookupDataLoader) transformationEngine1.getDataLoader();
|
||||
|
||||
List<String> fileDataLoaders = submissionLookupService.getFileProviders();
|
||||
for (String fileDataLoader : fileDataLoaders) {
|
||||
dataLoader.setFile(file.getAbsolutePath(), fileDataLoader);
|
||||
|
||||
try {
|
||||
SubmissionLookupOutputGenerator outputGenerator =
|
||||
(SubmissionLookupOutputGenerator) transformationEngine1.getOutputGenerator();
|
||||
outputGenerator.setDtoList(new ArrayList<ItemSubmissionLookupDTO>());
|
||||
log.debug("BTE transformation is about to start!");
|
||||
transformationEngine1.transform(spec);
|
||||
log.debug("BTE transformation finished!");
|
||||
tmpResult.addAll(outputGenerator.getDtoList());
|
||||
if (!tmpResult.isEmpty()) {
|
||||
//exit with the results founded on the first data provided
|
||||
break;
|
||||
}
|
||||
} catch (BadTransformationSpec e1) {
|
||||
log.error(e1.getMessage(), e1);
|
||||
} catch (MalformedSourceException e1) {
|
||||
log.error(e1.getMessage(), e1);
|
||||
for (MultipartFile mpFile : uploadfiles) {
|
||||
File file = Utils.getFile(mpFile, "upload-loader", "filedataloader");
|
||||
try {
|
||||
ImportRecord record = importService.getRecord(file, mpFile.getOriginalFilename());
|
||||
if (record != null) {
|
||||
records.add(record);
|
||||
break;
|
||||
}
|
||||
} finally {
|
||||
file.delete();
|
||||
}
|
||||
}
|
||||
} catch (FileMultipleOccurencesException e) {
|
||||
throw new UnprocessableEntityException("Too many entries in file");
|
||||
} catch (Exception e) {
|
||||
log.error("Error importing metadata", e);
|
||||
}
|
||||
WorkspaceItem source = submissionService.
|
||||
createWorkspaceItem(context, getRequestService().getCurrentRequest());
|
||||
merge(context, records, source);
|
||||
result = new ArrayList<>();
|
||||
result.add(source);
|
||||
|
||||
List<WorkspaceItem> result = null;
|
||||
|
||||
//try to ingest workspaceitems
|
||||
if (!tmpResult.isEmpty()) {
|
||||
TransformationEngine transformationEngine2 = submissionLookupService.getPhase2TransformationEngine();
|
||||
if (transformationEngine2 != null) {
|
||||
SubmissionItemDataLoader dataLoader =
|
||||
(SubmissionItemDataLoader) transformationEngine2.getDataLoader();
|
||||
dataLoader.setDtoList(tmpResult);
|
||||
// dataLoader.setProviders()
|
||||
|
||||
DSpaceWorkspaceItemOutputGenerator outputGenerator =
|
||||
(DSpaceWorkspaceItemOutputGenerator) transformationEngine2.getOutputGenerator();
|
||||
outputGenerator.setCollection(collection);
|
||||
outputGenerator.setContext(context);
|
||||
outputGenerator.setFormName(submissionConfig.getSubmissionName());
|
||||
outputGenerator.setDto(tmpResult.get(0));
|
||||
|
||||
try {
|
||||
transformationEngine2.transform(spec);
|
||||
result = outputGenerator.getWitems();
|
||||
} catch (BadTransformationSpec e1) {
|
||||
e1.printStackTrace();
|
||||
} catch (MalformedSourceException e1) {
|
||||
e1.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//we have to create the workspaceitem to push the file also if nothing found before
|
||||
if (result == null) {
|
||||
WorkspaceItem source =
|
||||
submissionService.createWorkspaceItem(context, getRequestService().getCurrentRequest());
|
||||
result = new ArrayList<>();
|
||||
result.add(source);
|
||||
}
|
||||
|
||||
//perform upload of bitstream if there is exact one result and convert workspaceitem to entity rest
|
||||
if (result != null && !result.isEmpty()) {
|
||||
for (WorkspaceItem wi : result) {
|
||||
|
||||
List<ErrorRest> errors = new ArrayList<ErrorRest>();
|
||||
|
||||
//load bitstream into bundle ORIGINAL only if there is one result (approximately this is the
|
||||
// right behaviour for pdf file but not for other bibliographic format e.g. bibtex)
|
||||
if (result.size() == 1) {
|
||||
|
||||
for (int i = 0; i < submissionConfig.getNumberOfSteps(); i++) {
|
||||
SubmissionStepConfig stepConfig = submissionConfig.getStep(i);
|
||||
|
||||
ClassLoader loader = this.getClass().getClassLoader();
|
||||
Class stepClass;
|
||||
try {
|
||||
stepClass = loader.loadClass(stepConfig.getProcessingClassName());
|
||||
|
||||
Object stepInstance = stepClass.newInstance();
|
||||
if (UploadableStep.class.isAssignableFrom(stepClass)) {
|
||||
UploadableStep uploadableStep = (UploadableStep) stepInstance;
|
||||
ErrorRest err = uploadableStep.upload(context, submissionService, stepConfig, wi,
|
||||
uploadfile);
|
||||
//perform upload of bitstream if there is exact one result and convert workspaceitem to entity rest
|
||||
if (!result.isEmpty()) {
|
||||
for (WorkspaceItem wi : result) {
|
||||
List<ErrorRest> errors = new ArrayList<ErrorRest>();
|
||||
wi.setMultipleFiles(uploadfiles.size() > 1);
|
||||
//load bitstream into bundle ORIGINAL only if there is one result (approximately this is the
|
||||
// right behaviour for pdf file but not for other bibliographic format e.g. bibtex)
|
||||
if (result.size() == 1) {
|
||||
for (int i = 0; i < submissionConfig.getNumberOfSteps(); i++) {
|
||||
SubmissionStepConfig stepConfig = submissionConfig.getStep(i);
|
||||
ClassLoader loader = this.getClass().getClassLoader();
|
||||
Class stepClass;
|
||||
try {
|
||||
stepClass = loader.loadClass(stepConfig.getProcessingClassName());
|
||||
Object stepInstance = stepClass.newInstance();
|
||||
if (UploadableStep.class.isAssignableFrom(stepClass)) {
|
||||
UploadableStep uploadableStep = (UploadableStep) stepInstance;
|
||||
for (MultipartFile mpFile : uploadfiles) {
|
||||
ErrorRest err = uploadableStep.upload(context,
|
||||
submissionService, stepConfig, wi, mpFile);
|
||||
if (err != null) {
|
||||
errors.add(err);
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error(e.getMessage(), e);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
WorkspaceItemRest wsi = converter.toRest(wi, utils.obtainProjection());
|
||||
if (result.size() == 1) {
|
||||
if (!errors.isEmpty()) {
|
||||
wsi.getErrors().addAll(errors);
|
||||
}
|
||||
}
|
||||
results.add(wsi);
|
||||
}
|
||||
WorkspaceItemRest wsi = converter.toRest(wi, utils.obtainProjection());
|
||||
if (result.size() == 1) {
|
||||
if (!errors.isEmpty()) {
|
||||
wsi.getErrors().addAll(errors);
|
||||
}
|
||||
}
|
||||
results.add(wsi);
|
||||
}
|
||||
} finally {
|
||||
file.delete();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
@@ -551,4 +488,24 @@ public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceI
|
||||
public Class<Integer> getPKClass() {
|
||||
return Integer.class;
|
||||
}
|
||||
|
||||
private void merge(Context context, List<ImportRecord> records, WorkspaceItem item) throws SQLException {
|
||||
for (MetadataValue metadataValue : itemService.getMetadata(
|
||||
item.getItem(), Item.ANY, Item.ANY, Item.ANY, Item.ANY)) {
|
||||
itemService.clearMetadata(context, item.getItem(),
|
||||
metadataValue.getMetadataField().getMetadataSchema().getNamespace(),
|
||||
metadataValue.getMetadataField().getElement(),
|
||||
metadataValue.getMetadataField().getQualifier(),
|
||||
metadataValue.getLanguage());
|
||||
}
|
||||
for (ImportRecord record : records) {
|
||||
if (record != null && record.getValueList() != null) {
|
||||
for (MetadatumDTO metadataValue : record.getValueList()) {
|
||||
itemService.addMetadata(context, item.getItem(), metadataValue.getSchema(),
|
||||
metadataValue.getElement(), metadataValue.getQualifier(), null,
|
||||
metadataValue.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -871,7 +871,7 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration
|
||||
*
|
||||
* @throws Exception
|
||||
*/
|
||||
public void createMultipleWorkspaceItemFromFileTest() throws Exception {
|
||||
public void createSingleWorkspaceItemFromFileWithOneEntryTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
//** GIVEN **
|
||||
@@ -892,57 +892,260 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration
|
||||
.build();
|
||||
|
||||
InputStream bibtex = getClass().getResourceAsStream("bibtex-test.bib");
|
||||
final MockMultipartFile bibtexFile = new MockMultipartFile("file", "bibtex-test.bib", "application/x-bibtex",
|
||||
bibtex);
|
||||
final MockMultipartFile bibtexFile = new MockMultipartFile("file", "/local/path/bibtex-test.bib",
|
||||
"application/x-bibtex", bibtex);
|
||||
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
String authToken = getAuthToken(eperson.getEmail(), password);
|
||||
// bulk create workspaceitems in the default collection (col1)
|
||||
// create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1)
|
||||
getClient(authToken).perform(fileUpload("/api/submission/workspaceitems")
|
||||
.file(bibtexFile))
|
||||
// bulk create should return 200, 201 (created) is better for single resource
|
||||
// create should return 200, 201 (created) is better for single resource
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[0]._embedded.collection.id", is(col1.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[1].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article 2")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.source'][0].value",
|
||||
is("/local/path/bibtex-test.bib")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.title'][0].value",
|
||||
is("bibtex-test.bib")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[1]._embedded.collection.id", is(col1.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[2].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article 3")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[2]._embedded.collection.id", is(col1.getID().toString())))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[*]._embedded.upload").doesNotExist())
|
||||
jsonPath("$._embedded.workspaceitems[*]._embedded.upload").doesNotExist());
|
||||
;
|
||||
|
||||
// bulk create workspaceitems explicitly in the col2
|
||||
// create a workspaceitem from a single bibliographic entry file explicitly in the col2
|
||||
getClient(authToken).perform(fileUpload("/api/submission/workspaceitems")
|
||||
.file(bibtexFile)
|
||||
.param("collection", col2.getID().toString()))
|
||||
.param("owningCollection", col2.getID().toString()))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[0]._embedded.collection.id", is(col2.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[1].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article 2")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.source'][0].value",
|
||||
is("/local/path/bibtex-test.bib")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload"
|
||||
+ ".files[0].metadata['dc.title'][0].value",
|
||||
is("bibtex-test.bib")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[1]._embedded.collection.id", is(col2.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[2].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article 3")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[2]._embedded.collection.id", is(col2.getID().toString())))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[*]._embedded.upload").doesNotExist())
|
||||
;
|
||||
jsonPath("$._embedded.workspaceitems[*]._embedded.upload").doesNotExist());
|
||||
|
||||
bibtex.close();
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
/**
|
||||
* Test the creation of workspaceitems POSTing to the resource collection endpoint a
|
||||
* bibtex and pubmed files
|
||||
*
|
||||
* @throws Exception
|
||||
*/
|
||||
public void createSingleWorkspaceItemFromMultipleFilesWithOneEntryTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
//** GIVEN **
|
||||
//1. A community-collection structure with one parent community with sub-community and two collections.
|
||||
parentCommunity = CommunityBuilder.createCommunity(context)
|
||||
.withName("Parent Community")
|
||||
.build();
|
||||
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
|
||||
.withName("Sub Community")
|
||||
.build();
|
||||
Collection col1 = CollectionBuilder.createCollection(context, child1)
|
||||
.withName("Collection 1")
|
||||
.withSubmitterGroup(eperson)
|
||||
.build();
|
||||
Collection col2 = CollectionBuilder.createCollection(context, child1)
|
||||
.withName("Collection 2")
|
||||
.withSubmitterGroup(eperson)
|
||||
.build();
|
||||
|
||||
InputStream bibtex = getClass().getResourceAsStream("bibtex-test.bib");
|
||||
final MockMultipartFile bibtexFile = new MockMultipartFile("file", "/local/path/bibtex-test.bib",
|
||||
"application/x-bibtex", bibtex);
|
||||
InputStream xmlIS = getClass().getResourceAsStream("pubmed-test.xml");
|
||||
final MockMultipartFile pubmedFile = new MockMultipartFile("file", "/local/path/pubmed-test.xml",
|
||||
"application/xml", xmlIS);
|
||||
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
String authToken = getAuthToken(eperson.getEmail(), password);
|
||||
// create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1)
|
||||
getClient(authToken).perform(fileUpload("/api/submission/workspaceitems")
|
||||
.file(bibtexFile).file(pubmedFile))
|
||||
// create should return 200, 201 (created) is better for single resource
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[0]._embedded.collection.id", is(col1.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.source'][0].value",
|
||||
is("/local/path/bibtex-test.bib")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.title'][0].value",
|
||||
is("bibtex-test.bib")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][1].value")
|
||||
.doesNotExist())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[1]"
|
||||
+ ".metadata['dc.source'][0].value",
|
||||
is("/local/path/pubmed-test.xml")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[1]"
|
||||
+ ".metadata['dc.title'][0].value",
|
||||
is("pubmed-test.xml")));
|
||||
|
||||
// create a workspaceitem from a single bibliographic entry file explicitly in the col2
|
||||
getClient(authToken).perform(fileUpload("/api/submission/workspaceitems")
|
||||
.file(bibtexFile).file(pubmedFile)
|
||||
.param("owningCollection", col2.getID().toString()))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("My Article")))
|
||||
.andExpect(
|
||||
jsonPath("$._embedded.workspaceitems[0]._embedded.collection.id", is(col2.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.source'][0].value",
|
||||
is("/local/path/bibtex-test.bib")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload"
|
||||
+ ".files[0].metadata['dc.title'][0].value",
|
||||
is("bibtex-test.bib")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][1].value")
|
||||
.doesNotExist())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[1]"
|
||||
+ ".metadata['dc.source'][0].value",
|
||||
is("/local/path/pubmed-test.xml")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[1]"
|
||||
+ ".metadata['dc.title'][0].value",
|
||||
is("pubmed-test.xml")));
|
||||
bibtex.close();
|
||||
xmlIS.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
/**
|
||||
* Test the creation of workspaceitems POSTing to the resource collection endpoint a bibtex file
|
||||
* contains more than one entry.
|
||||
*
|
||||
* @throws Exception
|
||||
*/
|
||||
public void createSingleWorkspaceItemsFromSingleFileWithMultipleEntriesTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
//** GIVEN **
|
||||
//1. A community-collection structure with one parent community with sub-community and two collections.
|
||||
parentCommunity = CommunityBuilder.createCommunity(context)
|
||||
.withName("Parent Community")
|
||||
.build();
|
||||
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
|
||||
.withName("Sub Community")
|
||||
.build();
|
||||
Collection col1 = CollectionBuilder.createCollection(context, child1)
|
||||
.withName("Collection 1")
|
||||
.withSubmitterGroup(eperson)
|
||||
.build();
|
||||
Collection col2 = CollectionBuilder.createCollection(context, child1)
|
||||
.withName("Collection 2")
|
||||
.withSubmitterGroup(eperson)
|
||||
.build();
|
||||
|
||||
InputStream bibtex = getClass().getResourceAsStream("bibtex-test-3-entries.bib");
|
||||
final MockMultipartFile bibtexFile = new MockMultipartFile("file", "bibtex-test-3-entries.bib",
|
||||
"application/x-bibtex",
|
||||
bibtex);
|
||||
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
String authToken = getAuthToken(eperson.getEmail(), password);
|
||||
// create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1)
|
||||
getClient(authToken).perform(fileUpload("/api/submission/workspaceitems")
|
||||
.file(bibtexFile))
|
||||
// create should return return a 422 because we don't allow/support bibliographic files
|
||||
// that have multiple metadata records
|
||||
.andExpect(status().is(422));
|
||||
bibtex.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
/**
|
||||
* Test the creation of workspaceitems POSTing to the resource collection endpoint a pubmed XML
|
||||
* file.
|
||||
*
|
||||
* @throws Exception
|
||||
*/
|
||||
public void createPubmedWorkspaceItemFromFileTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
//** GIVEN **
|
||||
//1. A community-collection structure with one parent community with sub-community and two collections.
|
||||
parentCommunity = CommunityBuilder.createCommunity(context)
|
||||
.withName("Parent Community")
|
||||
.build();
|
||||
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
|
||||
.withName("Sub Community")
|
||||
.build();
|
||||
Collection col1 = CollectionBuilder.createCollection(context, child1)
|
||||
.withName("Collection 1")
|
||||
.withSubmitterGroup(eperson)
|
||||
.build();
|
||||
Collection col2 = CollectionBuilder.createCollection(context, child1)
|
||||
.withName("Collection 2")
|
||||
.withSubmitterGroup(eperson)
|
||||
.build();
|
||||
InputStream xmlIS = getClass().getResourceAsStream("pubmed-test.xml");
|
||||
final MockMultipartFile pubmedFile = new MockMultipartFile("file", "/local/path/pubmed-test.xml",
|
||||
"application/xml", xmlIS);
|
||||
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
String authToken = getAuthToken(eperson.getEmail(), password);
|
||||
// create a workspaceitem from a single bibliographic entry file explicitly in the default collection (col1)
|
||||
getClient(authToken).perform(fileUpload("/api/submission/workspaceitems")
|
||||
.file(pubmedFile))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("Multistep microreactions with proteins using electrocapture technology.")))
|
||||
.andExpect(
|
||||
jsonPath(
|
||||
"$._embedded.workspaceitems[0].sections.traditionalpageone['dc.identifier.other'][0].value",
|
||||
is("15117179")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone"
|
||||
+ "['dc.contributor.author'][0].value",
|
||||
is("Astorga-Wells, Juan")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.source'][0].value",
|
||||
is("/local/path/pubmed-test.xml")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0]"
|
||||
+ ".metadata['dc.title'][0].value",
|
||||
is("pubmed-test.xml")));
|
||||
|
||||
// create a workspaceitem from a single bibliographic entry file explicitly in the col2
|
||||
getClient(authToken).perform(fileUpload("/api/submission/workspaceitems")
|
||||
.file(pubmedFile)
|
||||
.param("owningCollection", col2.getID().toString()))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone['dc.title'][0].value",
|
||||
is("Multistep microreactions with proteins using electrocapture technology.")))
|
||||
.andExpect(
|
||||
jsonPath(
|
||||
"$._embedded.workspaceitems[0].sections.traditionalpageone['dc.identifier.other'][0].value",
|
||||
is("15117179")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.traditionalpageone"
|
||||
+ "['dc.contributor.author'][0].value",
|
||||
is("Astorga-Wells, Juan")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0].metadata['dc.source'][0].value",
|
||||
is("/local/path/pubmed-test.xml")))
|
||||
.andExpect(jsonPath("$._embedded.workspaceitems[0].sections.upload.files[0].metadata['dc.title'][0].value",
|
||||
is("pubmed-test.xml")));
|
||||
|
||||
xmlIS.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
/**
|
||||
* Test the creation of a workspaceitem POSTing to the resource collection endpoint a PDF file. As a single item
|
||||
@@ -1052,7 +1255,7 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration
|
||||
|
||||
// create an empty workspaceitem explicitly in the col1, check validation on creation
|
||||
getClient(authToken).perform(post("/api/submission/workspaceitems")
|
||||
.param("collection", col1.getID().toString())
|
||||
.param("owningCollection", col1.getID().toString())
|
||||
.contentType(org.springframework.http.MediaType.APPLICATION_JSON))
|
||||
.andExpect(status().isCreated())
|
||||
// title and dateissued are required in the first panel
|
||||
|
@@ -0,0 +1,14 @@
|
||||
@misc{ Nobody01,
|
||||
author = "Nobody Jr",
|
||||
title = "My Article",
|
||||
year = "2006" }
|
||||
|
||||
@misc{ Nobody02,
|
||||
author = "Nobody Jr",
|
||||
title = "My Article 2",
|
||||
year = "2006" }
|
||||
|
||||
@misc{ Nobody03,
|
||||
author = "Nobody Jr",
|
||||
title = "My Article 3",
|
||||
year = "2018" }
|
@@ -1,14 +1,4 @@
|
||||
@misc{ Nobody01,
|
||||
author = "Nobody Jr",
|
||||
title = "My Article",
|
||||
year = "2006" }
|
||||
|
||||
@misc{ Nobody02,
|
||||
author = "Nobody Jr",
|
||||
title = "My Article 2",
|
||||
year = "2006" }
|
||||
|
||||
@misc{ Nobody03,
|
||||
author = "Nobody Jr",
|
||||
title = "My Article 3",
|
||||
year = "2018" }
|
||||
year = "2006" }
|
@@ -0,0 +1,151 @@
|
||||
<?xml version="1.0" ?>
|
||||
<!DOCTYPE PubmedArticleSet PUBLIC "-//NLM//DTD PubMedArticle, 1st January 2019//EN" "https://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_190101.dtd">
|
||||
<PubmedArticleSet>
|
||||
<PubmedArticle>
|
||||
<MedlineCitation Status="MEDLINE" Owner="NLM">
|
||||
<PMID Version="1">15117179</PMID>
|
||||
<DateCompleted>
|
||||
<Year>2005</Year>
|
||||
<Month>02</Month>
|
||||
<Day>15</Day>
|
||||
</DateCompleted>
|
||||
<DateRevised>
|
||||
<Year>2006</Year>
|
||||
<Month>11</Month>
|
||||
<Day>15</Day>
|
||||
</DateRevised>
|
||||
<Article PubModel="Print">
|
||||
<Journal>
|
||||
<ISSN IssnType="Print">0003-2700</ISSN>
|
||||
<JournalIssue CitedMedium="Print">
|
||||
<Volume>76</Volume>
|
||||
<Issue>9</Issue>
|
||||
<PubDate>
|
||||
<Year>2004</Year>
|
||||
<Month>May</Month>
|
||||
<Day>01</Day>
|
||||
</PubDate>
|
||||
</JournalIssue>
|
||||
<Title>Analytical chemistry</Title>
|
||||
<ISOAbbreviation>Anal. Chem.</ISOAbbreviation>
|
||||
</Journal>
|
||||
<ArticleTitle>Multistep microreactions with proteins using electrocapture technology.</ArticleTitle>
|
||||
<Pagination>
|
||||
<MedlinePgn>2425-9</MedlinePgn>
|
||||
</Pagination>
|
||||
<Abstract>
|
||||
<AbstractText>A method to perform multistep reactions by means of electroimmobilization of a target molecule in a microflow stream is presented. A target protein is captured by the opposing effects between the hydrodynamic and electric forces, after which another medium is injected into the system. The second medium carries enzymes or other reagents, which are brought into contact with the target protein and react. The immobilization is reversed by disconnecting the electric field, upon which products are collected at the outlet of the device for analysis. On-line reduction, alkylation, and trypsin digestion of proteins is demonstrated and was monitored by MALDI mass spectrometry.</AbstractText>
|
||||
</Abstract>
|
||||
<AuthorList CompleteYN="Y">
|
||||
<Author ValidYN="Y">
|
||||
<LastName>Astorga-Wells</LastName>
|
||||
<ForeName>Juan</ForeName>
|
||||
<Initials>J</Initials>
|
||||
<AffiliationInfo>
|
||||
<Affiliation>Department of Medical Biochemistry and Biophysics, Karolinska Institutet, SE-171 77 Stockholm, Sweden.</Affiliation>
|
||||
</AffiliationInfo>
|
||||
</Author>
|
||||
<Author ValidYN="Y">
|
||||
<LastName>Bergman</LastName>
|
||||
<ForeName>Tomas</ForeName>
|
||||
<Initials>T</Initials>
|
||||
</Author>
|
||||
<Author ValidYN="Y">
|
||||
<LastName>Jörnvall</LastName>
|
||||
<ForeName>Hans</ForeName>
|
||||
<Initials>H</Initials>
|
||||
</Author>
|
||||
</AuthorList>
|
||||
<Language>eng</Language>
|
||||
<PublicationTypeList>
|
||||
<PublicationType UI="D016428">Journal Article</PublicationType>
|
||||
<PublicationType UI="D013485">Research Support, Non-U.S. Gov't</PublicationType>
|
||||
</PublicationTypeList>
|
||||
</Article>
|
||||
<MedlineJournalInfo>
|
||||
<Country>United States</Country>
|
||||
<MedlineTA>Anal Chem</MedlineTA>
|
||||
<NlmUniqueID>0370536</NlmUniqueID>
|
||||
<ISSNLinking>0003-2700</ISSNLinking>
|
||||
</MedlineJournalInfo>
|
||||
<ChemicalList>
|
||||
<Chemical>
|
||||
<RegistryNumber>0</RegistryNumber>
|
||||
<NameOfSubstance UI="D011506">Proteins</NameOfSubstance>
|
||||
</Chemical>
|
||||
<Chemical>
|
||||
<RegistryNumber>EC 3.4.21.4</RegistryNumber>
|
||||
<NameOfSubstance UI="D014357">Trypsin</NameOfSubstance>
|
||||
</Chemical>
|
||||
</ChemicalList>
|
||||
<CitationSubset>IM</CitationSubset>
|
||||
<MeshHeadingList>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D000818" MajorTopicYN="N">Animals</DescriptorName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D002417" MajorTopicYN="N">Cattle</DescriptorName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D004563" MajorTopicYN="N">Electrochemistry</DescriptorName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D006736" MajorTopicYN="N">Horses</DescriptorName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D044085" MajorTopicYN="N">Microfluidics</DescriptorName>
|
||||
<QualifierName UI="Q000295" MajorTopicYN="N">instrumentation</QualifierName>
|
||||
<QualifierName UI="Q000379" MajorTopicYN="Y">methods</QualifierName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D010449" MajorTopicYN="N">Peptide Mapping</DescriptorName>
|
||||
<QualifierName UI="Q000379" MajorTopicYN="N">methods</QualifierName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D011506" MajorTopicYN="N">Proteins</DescriptorName>
|
||||
<QualifierName UI="Q000032" MajorTopicYN="Y">analysis</QualifierName>
|
||||
<QualifierName UI="Q000737" MajorTopicYN="N">chemistry</QualifierName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D019032" MajorTopicYN="N">Spectrometry, Mass, Matrix-Assisted Laser Desorption-Ionization</DescriptorName>
|
||||
<QualifierName UI="Q000379" MajorTopicYN="N">methods</QualifierName>
|
||||
</MeshHeading>
|
||||
<MeshHeading>
|
||||
<DescriptorName UI="D014357" MajorTopicYN="N">Trypsin</DescriptorName>
|
||||
<QualifierName UI="Q000737" MajorTopicYN="N">chemistry</QualifierName>
|
||||
</MeshHeading>
|
||||
</MeshHeadingList>
|
||||
</MedlineCitation>
|
||||
<PubmedData>
|
||||
<History>
|
||||
<PubMedPubDate PubStatus="pubmed">
|
||||
<Year>2004</Year>
|
||||
<Month>5</Month>
|
||||
<Day>1</Day>
|
||||
<Hour>5</Hour>
|
||||
<Minute>0</Minute>
|
||||
</PubMedPubDate>
|
||||
<PubMedPubDate PubStatus="medline">
|
||||
<Year>2005</Year>
|
||||
<Month>2</Month>
|
||||
<Day>16</Day>
|
||||
<Hour>9</Hour>
|
||||
<Minute>0</Minute>
|
||||
</PubMedPubDate>
|
||||
<PubMedPubDate PubStatus="entrez">
|
||||
<Year>2004</Year>
|
||||
<Month>5</Month>
|
||||
<Day>1</Day>
|
||||
<Hour>5</Hour>
|
||||
<Minute>0</Minute>
|
||||
</PubMedPubDate>
|
||||
</History>
|
||||
<PublicationStatus>ppublish</PublicationStatus>
|
||||
<ArticleIdList>
|
||||
<ArticleId IdType="pubmed">15117179</ArticleId>
|
||||
<ArticleId IdType="doi">10.1021/ac0354342</ArticleId>
|
||||
</ArticleIdList>
|
||||
</PubmedData>
|
||||
</PubmedArticle>
|
||||
|
||||
</PubmedArticleSet>
|
74
dspace/config/spring/api/bibtex-integration.xml
Normal file
74
dspace/config/spring/api/bibtex-integration.xml
Normal file
@@ -0,0 +1,74 @@
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:util="http://www.springframework.org/schema/util"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/beans
|
||||
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
|
||||
http://www.springframework.org/schema/context
|
||||
http://www.springframework.org/schema/context/spring-context-2.5.xsd http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"
|
||||
default-autowire-candidates="*Service,*DAO,javax.sql.DataSource">
|
||||
|
||||
<context:annotation-config/>
|
||||
<!-- allows us to use spring annotations in beans -->
|
||||
|
||||
<util:map id="bibtexMetadataFieldMap" key-type="org.dspace.importer.external.metadatamapping.MetadataFieldConfig"
|
||||
value-type="org.dspace.importer.external.metadatamapping.contributor.MetadataContributor">
|
||||
<description>Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
|
||||
only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
|
||||
what metadatafield is generated.
|
||||
</description>
|
||||
<entry key-ref="bibtex.dc.title" value-ref="bibtexTitleContrib" />
|
||||
<entry key-ref="bibtex.dc.authors" value-ref="bibtexAuthorsContrib" />
|
||||
<entry key-ref="bibtex.dc.journal" value-ref="bibtexJournalContrib" />
|
||||
<entry key-ref="bibtex.dc.issued" value-ref="bibtexIssuedContrib" />
|
||||
<entry key-ref="bibtex.dc.jissn" value-ref="bibtexJissnContrib" />
|
||||
</util:map>
|
||||
|
||||
<bean id="bibtexJissnContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleMetadataContributor">
|
||||
<property name="field" ref="bibtex.dc.jissn"/>
|
||||
<property name="key" value="ISSN" />
|
||||
</bean>
|
||||
|
||||
<bean id="bibtexIssuedContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleMetadataContributor">
|
||||
<property name="field" ref="bibtex.dc.issued"/>
|
||||
<property name="key" value="year" />
|
||||
</bean>
|
||||
|
||||
<bean id="bibtexJournalContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleMetadataContributor">
|
||||
<property name="field" ref="bibtex.dc.journal"/>
|
||||
<property name="key" value="journal" />
|
||||
</bean>
|
||||
|
||||
<bean id="bibtexAuthorsContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleMetadataContributor">
|
||||
<property name="field" ref="bibtex.dc.authors"/>
|
||||
<property name="key" value="author" />
|
||||
</bean>
|
||||
|
||||
<bean id="bibtexTitleContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleMetadataContributor">
|
||||
<property name="field" ref="bibtex.dc.title"/>
|
||||
<property name="key" value="title" />
|
||||
</bean>
|
||||
|
||||
|
||||
|
||||
<bean id="bibtex.dc.jissn" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.identifier.issn"/>
|
||||
</bean>
|
||||
|
||||
<bean id="bibtex.dc.issued" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.date.issued"/>
|
||||
</bean>
|
||||
|
||||
<bean id="bibtex.dc.journal" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.source"/>
|
||||
</bean>
|
||||
|
||||
<bean id="bibtex.dc.authors" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.contributor.author"/>
|
||||
</bean>
|
||||
|
||||
<bean id="bibtex.dc.title" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.title"/>
|
||||
</bean>
|
||||
|
||||
</beans>
|
Reference in New Issue
Block a user