w2p-63043 Improve related query performance and improve affiliated test

Andrew Wood
2019-08-16 13:48:54 -04:00
parent e40a209113
commit 55cc060b13
6 changed files with 92 additions and 23 deletions

View File

@@ -772,6 +772,12 @@
         <version>2.6.2</version>
         <scope>test</scope>
     </dependency>
+    <dependency>
+        <groupId>org.hibernate.javax.persistence</groupId>
+        <artifactId>hibernate-jpa-2.1-api</artifactId>
+        <version>1.0.0.Final</version>
+        <scope>compile</scope>
+    </dependency>
 </dependencies>
 </project>
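Presumably the compile-scope hibernate-jpa-2.1-api artifact is added so this module can compile directly against the javax.persistence API used by the reworked DAO query further down; that motivation is inferred, not stated in the commit. A minimal sketch of that API surface (illustrative names only, not code from this commit):

    // Sketch only: the javax.persistence types a compile-scope JPA 2.1 API
    // artifact makes available at build time. "MetadataValue" stands in for
    // any mapped entity; the helper name is hypothetical.
    import javax.persistence.EntityManager;
    import javax.persistence.Query;

    public class JpaApiSketch {
        static Query valueQuery(EntityManager em, String textValue) {
            // JPQL addressing mapped properties, in the style of the DAO change below
            Query q = em.createQuery("SELECT m FROM MetadataValue m WHERE m.value = :text_value");
            q.setParameter("text_value", textValue);
            return q;
        }
    }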

View File

@@ -1610,10 +1610,12 @@ public class MetadataImport {
                         newLine.add(key, value);
                     }
                 } else {
+                    if (!line.get(key).get(0).isEmpty()) {
                         newLine.add(key, line.get(key).get(0));
+                    }
                 }
             }
         }
         return newLine;
     }
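The added guard keeps an empty first cell value from being copied into the rebuilt CSV line. A minimal, self-contained illustration using plain collections (hypothetical data; the real line/newLine are DSpace CSV line objects, not the maps used here):

    // Illustration only: an empty cell is skipped rather than re-emitted.
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class EmptyValueGuardSketch {
        public static void main(String[] args) {
            Map<String, List<String>> line = new HashMap<>();
            line.put("dc.identifier.other", List.of(""));   // empty source cell
            Map<String, List<String>> newLine = new HashMap<>();

            String first = line.get("dc.identifier.other").get(0);
            if (!first.isEmpty()) {                          // same check as the diff
                newLine.computeIfAbsent("dc.identifier.other", k -> new ArrayList<>()).add(first);
            }
            System.out.println(newLine);                     // prints {} : nothing was copied
        }
    }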

View File

@@ -74,7 +74,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
     @Override
     public Iterator<MetadataValue> findByFieldAndValue(Context context, MetadataField metadataField, String value)
         throws SQLException {
-        return metadataValueDAO.findByFieldAndValue(context, metadataField, value);
+        return metadataValueDAO.findItemValuesByFieldAndValue(context, metadataField, value);
     }

     @Override

View File

@@ -28,7 +28,8 @@ public interface MetadataValueDAO extends GenericDAO<MetadataValue> {
     public List<MetadataValue> findByField(Context context, MetadataField fieldId) throws SQLException;

-    public Iterator<MetadataValue> findByFieldAndValue(Context context, MetadataField metadataField, String value)
+    public Iterator<MetadataValue> findItemValuesByFieldAndValue(Context context,
+                                                                 MetadataField metadataField, String value)
         throws SQLException;

     public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException;

View File

@@ -45,15 +45,16 @@ public class MetadataValueDAOImpl extends AbstractHibernateDAO<MetadataValue> im
         criteriaQuery.select(metadataValueRoot);
         criteriaQuery.where(criteriaBuilder.equal(join.get(MetadataField_.id), metadataField.getID()));
         return list(context, criteriaQuery, false, MetadataValue.class, -1, -1);
     }

     @Override
-    public Iterator<MetadataValue> findByFieldAndValue(Context context, MetadataField metadataField, String value)
+    public Iterator<MetadataValue> findItemValuesByFieldAndValue(Context context,
+                                                                 MetadataField metadataField, String value)
         throws SQLException {
-        String queryString = "SELECT m from MetadataValue m where metadata_field_id = :metadata_field_id " +
-            "and text_value = :text_value ";
+        String queryString = "SELECT m from MetadataValue m " +
+            "join Item i on m.dSpaceObject = i.id where m.metadataField.id = :metadata_field_id " +
+            "and m.value = :text_value";
         Query query = createQuery(context, queryString);
         query.setParameter("metadata_field_id", metadataField.getID());
         query.setParameter("text_value", value);

View File

@@ -69,16 +69,26 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
      * @param leftItem the left item in a known relationship
      * @param rightItem the right item in a known relationship
      * @param expectedCount expected relationship count for a known relationship
+     * @param placeDirection direction of the subject's relationship(s)
+     * @param placeCount expected place of the subject's relationship
      */
-    private void assertRelationship(Item leftItem, Item rightItem, int expectedCount) throws SQLException {
-        List<Relationship> rels = relationshipService.findByItem(context, leftItem);
+    private void assertRelationship(Item leftItem, Item rightItem, int expectedCount,
+                                    String placeDirection, int placeCount) throws SQLException {
+        List<Relationship> rels = relationshipService.findByItem(context, rightItem);
+        Relationship relationship = null;
         int foundCount = 0;
         for (Relationship rel : rels) {
-            if (rel.getLeftItem().getID().equals(leftItem.getID())
-                && rel.getRightItem().getID().equals(rightItem.getID())) {
+            if (rel.getRightItem().getID().equals(rightItem.getID())
+                && rel.getLeftItem().getID().equals(leftItem.getID())) {
                 foundCount++;
+                relationship = rel;
             }
         }
+        if (placeDirection.equalsIgnoreCase("left")) {
+            assertEquals(relationship.getLeftPlace(), placeCount);
+        } else {
+            assertEquals(relationship.getRightPlace(), placeCount);
+        }
         assertEquals(expectedCount, foundCount);
     }
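With the two extra parameters the helper now verifies ordering as well as existence: it looks relationships up from the right-hand item, keeps the relationship that links the given pair, and asserts its left place when placeDirection is "left", otherwise its right place. A short usage sketch with hypothetical publication/author items, in the same style as the calls updated in the hunks below:

    // Illustration only (hypothetical items, not part of this commit): two authors
    // of one publication are expected to occupy left places 0 and 1.
    assertRelationship(publication, firstAuthor, 1, "left", 0);
    assertRelationship(publication, secondAuthor, 1, "left", 1);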
@@ -92,7 +102,7 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
             "+,Person,," + col1.getHandle() + ",0",
             "+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"};
         Item[] items = runImport(csv);
-        assertRelationship(items[1], items[0], 1);
+        assertRelationship(items[1], items[0], 1, "left", 0);
     }

     /**
@@ -122,7 +132,7 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
             "+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0",
             "+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"};
         Item[] items = runImport(csv);
-        assertRelationship(items[1], items[0], 1);
+        assertRelationship(items[1], items[0], 1, "left", 0);
     }

     /**
@@ -136,8 +146,8 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
             "+,Person,," + col1.getHandle() + ",1",
             "+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"};
         Item[] items = runImport(csv);
-        assertRelationship(items[2], items[0], 1);
-        assertRelationship(items[2], items[1], 1);
+        assertRelationship(items[2], items[0], 1, "left", 0);
+        assertRelationship(items[2], items[1], 1, "left", 1);
     }

     /**
@@ -151,8 +161,8 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
             "+,Person,," + col1.getHandle() + ",1,val2",
             "+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"};
         Item[] items = runImport(csv);
-        assertRelationship(items[2], items[0], 1);
-        assertRelationship(items[2], items[1], 1);
+        assertRelationship(items[2], items[0], 1, "left", 0);
+        assertRelationship(items[2], items[1], 1, "left", 1);
     }

     /**
@@ -167,7 +177,7 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
         String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other",
             "+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"};
         Item[] items = runImport(csv);
-        assertRelationship(items[0], person, 1);
+        assertRelationship(items[0], person, 1, "left", 0);
     }

     /**
@@ -186,8 +196,8 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
             "+,Publication," + person.getID().toString() + "||" + person2.getID().toString() + "," +
                 col1.getHandle() + ",anything,0"};
         Item[] items = runImport(csv);
-        assertRelationship(items[0], person, 1);
-        assertRelationship(items[0], person2, 1);
+        assertRelationship(items[0], person, 1, "left", 0);
+        assertRelationship(items[0], person2, 1, "left", 1);
     }

     /**
@@ -205,8 +215,34 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
             "+,Person2,Person,," + col1.getHandle() + ",idVal,0",
             "+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"};
         Item[] items = runImport(csv);
-        assertRelationship(items[1], person, 1);
-        assertRelationship(items[1], items[0], 1);
+        assertRelationship(items[1], person, 1, "left", 0);
+        assertRelationship(items[1], items[0], 1, "left", 1);
+    }
+
+    /**
+     * Test existence of newly created items with proper relationships defined in the item's metadata via
+     * multi mixed references. One archived item, one by metadata reference in the CSV, and one by a rowName reference
+     * in the CSV
+     */
+    @Test
+    public void testMultiMixedRefArchivedCsv() throws Exception {
+        Item person = ItemBuilder.createItem(context, col1)
+            .withTitle("Person")
+            .withRelationshipType("Person")
+            .build();
+        Item person2 = ItemBuilder.createItem(context, col1)
+            .withTitle("Person2")
+            .withRelationshipType("Person")
+            .build();
+        String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
+            "dc.identifier.other",
+            "+,Person3,Person,," + col1.getHandle() + ",idVal,0",
+            "+,Pub1,Publication," + person.getID() + "||dc.title:Person2||rowName:idVal," +
+                col1.getHandle() + ",anything,1"};
+        Item[] items = runImport(csv);
+        assertRelationship(items[1], person, 1, "left", 0);
+        assertRelationship(items[1], person2, 1, "left", 1);
+        assertRelationship(items[1], items[0], 1, "left", 2);
     }

     /**
@@ -220,7 +256,7 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
             "+,Person:,Person,," + col1.getHandle() + ",idVal,0",
             "+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"};
         Item[] items = runImport(csv);
-        assertRelationship(items[1], items[0], 1);
+        assertRelationship(items[1], items[0], 1, "left", 0);
     }

     /**
@@ -235,6 +271,18 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
         assertEquals(1, performImportScript(csv, true));
     }

+    /**
+     * Test failure when referring to item by non unique metadata in the csv file.
+     */
+    @Test
+    public void testNonUniqueRowName() throws Exception {
+        String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName",
+            "+,Person,," + col1.getHandle() + ",1,value",
+            "+,Person,," + col1.getHandle() + ",1,value",
+            "+,Publication,rowName:value," + col1.getHandle() + ",2"};
+        assertEquals(1, performImportScript(csv, true));
+    }
+
     /**
      * Test failure when referring to item by non unique metadata in the database.
      */
@@ -279,6 +327,17 @@ public class CSVMetadataImportReferenceTest extends AbstractEntityIntegrationTes
         assertEquals(1, performImportScript(csv, false));
     }

+    /**
+     * Test failure when referring to an item in the CSV that hasn't been created yet due to its order in the CSV.
+     */
+    @Test
+    public void testCSVImportWrongOrder() throws Exception {
+        String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
+            "+,Publication,dc.identifier.other:8675309," + col1.getHandle() + ",2",
+            "+,Person,," + col1.getHandle() + ",1",};
+        assertEquals(1, performImportScript(csv, false));
+    }
+
     /**
      * Import mocked CSVs to test item creation behavior, deleting temporary file afterward.
      */