Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Clariah submission #922

Merged
merged 25 commits into from
Dec 9, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -147,11 +147,7 @@ public int perform(DSpaceObject dso) throws IOException
colhandle = item.getOwningCollection().getHandle();
checkExtra = checkExtraIds.contains(Integer.toString(item.getOwningCollection().getID()));
}
Metadatum[] itemTypeDCV = item.getMetadata("dc", "type", null, Item.ANY);
String itemType = null;
if(itemTypeDCV != null && itemTypeDCV.length >0){
itemType = itemTypeDCV[0].value;
}
Metadatum[] itemTypes = item.getMetadata(Item.ANY, "type", Item.ANY, Item.ANY);
for (DCInput input : getReqList(colhandle,checkExtra))
{
StringBuilder reqsb = new StringBuilder();
Expand All @@ -177,7 +173,7 @@ public int perform(DSpaceObject dso) throws IOException

if (!mdPatFound) {
Metadatum[] vals = item.getMetadataByMetadataString(req);
if ((itemType == null || input.isAllowedFor(itemType)) && vals.length == 0)
if ((itemTypes == null || itemTypes.length == 0 || input.isAllowedFor(itemTypes)) && vals.length == 0)
{
boolean issue_warning = true;
if (mdEquivalenceMap.containsKey(req)) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
package cz.cuni.mff.ufal.dspace.app.itemimport;

import cz.cuni.mff.ufal.DSpaceApi;
import cz.cuni.mff.ufal.lindat.utilities.hibernate.LicenseDefinition;
import cz.cuni.mff.ufal.lindat.utilities.interfaces.IFunctionalities;
import org.dspace.app.itemimport.ItemImport;
import org.dspace.content.*;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Variant of {@link ItemImport} whose replace mode swaps out an item's metadata
 * with the metadata found in the import package, while preserving a small set of
 * system-managed fields (provenance, accession/available dates, branding) and
 * re-attaching bitstream licenses based on the imported dc.rights.uri value.
 */
public class ItemImportReplacingMetadata extends ItemImport {

    // Used only to obtain the platform path separator when building metadata paths.
    private java.nio.file.FileSystem fs = java.nio.file.FileSystems.getDefault();

    // Licensing backend used to (re)attach licenses to bitstreams.
    IFunctionalities functionalities = DSpaceApi.getFunctionalityManager();

    /**
     * Replaces the metadata of every item listed in the map file with the metadata
     * found under {@code sourceDir}, keeping provenance, accessioned/available dates
     * and branding, then re-attaches bitstream licenses from the new dc.rights.uri.
     *
     * @param c             DSpace context (commits happen inside this method)
     * @param mycollections unused here; kept for the inherited signature
     * @param sourceDir     directory containing one subdirectory per imported item
     * @param mapFile       map of item directory name to handle (or raw item id)
     * @param template      unused here; kept for the inherited signature
     * @throws Exception if the source directory is missing, a handle/id cannot be
     *                   resolved to an item, or a license URI has no definition
     */
    @Override
    protected void replaceItems(Context c, Collection[] mycollections, String sourceDir, String mapFile,
                                boolean template) throws Exception {
        // verify the source directory; new File(...) never returns null, so the
        // former "d == null" check was dead code and has been dropped
        File d = new File(sourceDir);
        List<Item> processedItems = new ArrayList<>();

        if (!d.isDirectory())
        {
            throw new Exception("Error, cannot open source directory "
                    + sourceDir);
        }

        // read in map first, to get list of handles & source dirs
        Map<String, String> myHash = readMapFile(mapFile);

        for (Map.Entry<String, String> mapEntry : myHash.entrySet())
        {
            String itemName = mapEntry.getKey();
            String handle = mapEntry.getValue();
            Item item;

            if (handle.indexOf('/') != -1)
            {
                System.out.println("\tReplacing: " + handle);

                // locate the existing item by handle
                item = (Item) HandleManager.resolveToObject(c, handle);
            }
            else
            {
                // the map file may carry a raw internal item id instead of a handle
                item = Item.find(c, Integer.parseInt(handle));
            }

            if (item == null)
            {
                // fail fast with context instead of a NullPointerException below
                throw new Exception("Cannot resolve \"" + handle
                        + "\" to an existing item; aborting replace.");
            }

            // snapshot system-managed metadata so it survives the wipe below
            final Metadatum[] provenance = item.getMetadataByMetadataString("dc.description.provenance");
            final Metadatum[] accessioned = item.getMetadataByMetadataString("dc.date.accessioned");
            final Metadatum[] available = item.getMetadataByMetadataString("dc.date.available");
            final Metadatum[] branding = item.getMetadataByMetadataString("local.branding");

            item.clearMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
            loadMetadata(c, item, java.nio.file.Paths.get(sourceDir, itemName).toString() + fs.getSeparator());
            // restore the preserved system metadata on top of the imported values
            for (Metadatum[] mds : new Metadatum[][]{provenance, accessioned, available, branding}){
                for(Metadatum md : mds){
                    item.addMetadatum(md);
                }
            }
            processedItems.add(item);
        }

        // attempt at saving all changes or none
        for(Item i : processedItems){
            i.update();
        }
        c.commit();
        c.clearCache();

        // attach license; the license label requires a further update
        functionalities.openSession();
        try {
            for(Item i : processedItems){
                final String licenseURI = i.getMetadata("dc.rights.uri");
                if(licenseURI != null) {
                    final LicenseDefinition license = functionalities.getLicenseByDefinition(licenseURI);
                    if (license == null) {
                        // unknown URI would otherwise surface as an NPE on getLicenseId()
                        throw new Exception("No license definition found for \""
                                + licenseURI + "\".");
                    }
                    final int licenseId = license.getLicenseId();
                    for(Bundle bundle : i.getBundles("ORIGINAL")){
                        for(Bitstream b : bundle.getBitstreams()){
                            functionalities.detachLicenses(b.getID());
                            functionalities.attachLicense(licenseId, b.getID());
                        }
                    }
                    i.clearMetadata("dc", "rights", "label", Item.ANY);
                    i.addMetadata("dc", "rights", "label", Item.ANY, license.getLicenseLabel().getLabel());
                    i.update();
                }
            }
            c.commit();
            c.clearCache();
        } finally {
            // always release the utilities session, even when the loop fails
            functionalities.closeSession();
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -76,13 +76,9 @@ public int doProcessing(Context context, HttpServletRequest request,
}

// Fetch the document type (dc.type)
String documentType = "";
if( (item.getMetadataByMetadataString("dc.type") != null) && (item.getMetadataByMetadataString("dc.type").length >0) )
{
documentType = item.getMetadataByMetadataString("dc.type")[0].value;
}


Metadatum[] documentTypes = item.getMetadata(Item.ANY, "type", Item.ANY, Item.ANY);



// Step 1:
// clear out all item metadata defined on this page
Expand Down Expand Up @@ -115,7 +111,7 @@ public int doProcessing(Context context, HttpServletRequest request,
for (int j = 0; j < inputs.length; j++)
{
// Omit fields not allowed for this document type
if(!inputs[j].isAllowedFor(documentType))
if(!inputs[j].isAllowedFor(documentTypes))
{
continue;
}
Expand Down Expand Up @@ -249,7 +245,7 @@ else if (buttonPressed.equals("submit_" + fieldName + "_delete"))
{
// Do not check the required attribute if it is not visible or not allowed for the document type
String scope = subInfo.isInWorkflow() ? DCInput.WORKFLOW_SCOPE : DCInput.SUBMISSION_SCOPE;
if ( !( inputs[i].isVisible(scope) && inputs[i].isAllowedFor(documentType) ) )
if ( !( inputs[i].isVisible(scope) && inputs[i].isAllowedFor(documentTypes) ) )
{
continue;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -789,7 +789,7 @@ public void addItems(Context c, Collection[] mycollections,
}
}

private void replaceItems(Context c, Collection[] mycollections,
protected void replaceItems(Context c, Collection[] mycollections,
String sourceDir, String mapFile, boolean template) throws Exception
{
// verify the source directory
Expand Down Expand Up @@ -1048,7 +1048,7 @@ private void deleteItem(Context c, String myhandle) throws Exception
// utility methods
////////////////////////////////////
// read in the map file and generate a hashmap of (file,handle) pairs
private Map<String, String> readMapFile(String filename) throws Exception
protected Map<String, String> readMapFile(String filename) throws Exception
{
Map<String, String> myHash = new HashMap<String, String>();

Expand Down Expand Up @@ -1100,7 +1100,7 @@ private Map<String, String> readMapFile(String filename) throws Exception
}

// Load all metadata schemas into the item.
private void loadMetadata(Context c, Item myitem, String path)
protected void loadMetadata(Context c, Item myitem, String path)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException
{
Expand Down
33 changes: 26 additions & 7 deletions dspace-api/src/main/java/org/dspace/app/util/DCInput.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@

import org.apache.commons.lang.StringUtils;
import org.dspace.content.MetadataSchema;
import org.dspace.content.Metadatum;
import org.dspace.core.Context;
import org.xml.sax.SAXException;

Expand Down Expand Up @@ -110,7 +111,7 @@ public class DCInput
private boolean closedVocabulary = false;

/** allowed document types */
private List<String> typeBind = null;
private Map<String, List<String>> typeBind = new HashMap<>();

private ComplexDefinition complexDefinition = null;

Expand Down Expand Up @@ -199,13 +200,21 @@ public DCInput(Map<String, String> fieldMap,
|| "yes".equalsIgnoreCase(closedVocabularyStr);

// parsing of the <type-bind> element (using the colon as split separator)
typeBind = new ArrayList<String>();
String typeBindDef = fieldMap.get("type-bind");
if(typeBindDef != null && typeBindDef.trim().length() > 0) {
String[] types = typeBindDef.split(",");
List<String> boundTypes = new ArrayList<>();
for(String type : types) {
typeBind.add( type.trim() );
boundTypes.add( type.trim() );
}
String typeBindField = fieldMap.get(DCInputsReader.TYPE_BIND_FIELD_ATTRIBUTE);
List<String> existingTypes = typeBind.get(typeBindField);
if(existingTypes == null){
typeBind.put(typeBindField, boundTypes);
}else{
existingTypes.addAll(boundTypes);
}

}

}
Expand Down Expand Up @@ -520,14 +529,24 @@ public boolean isClosedVocabulary() {

/**
* Decides if this field is valid for the document type
* @param typeName Document type name
* @param types Document type Metadata
* @return true when there is no type restriction or typeName is allowed
*/
public boolean isAllowedFor(String typeName) {
public boolean isAllowedFor(Metadatum[] types) {
if(typeBind.size() == 0)
return true;

return typeBind.contains(typeName);

if(types != null) {
for (Metadatum md : types) {
String fieldName = md.getField();
List<String> allowedTypes = typeBind.get(fieldName);
if(allowedTypes != null && allowedTypes.contains(md.value)){
return true;
}
}
}

return false;
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ public class DCInputsReader
static final String PAIR_TYPE_NAME = "value-pairs-name";

static final String COMPLEX_DEFINITION_REF = "complex-definition-ref";
public static final String TYPE_BIND_FIELD_ATTRIBUTE = "field";

/** The fully qualified pathname of the form definition XML file */
private String defsFile = ConfigurationManager.getProperty("dspace.dir")
Expand Down Expand Up @@ -495,7 +496,13 @@ private void processPageParts(String formName, String page, Node n, Map<String,
} else if (tagName.equals("vocabulary")) {
String closedVocabularyString = getAttribute(nd, "closed");
field.put("closedVocabulary", closedVocabularyString);
}
} else if (tagName.equals("type-bind")){
String typeField = getAttribute(nd, TYPE_BIND_FIELD_ATTRIBUTE);
if(typeField == null || typeField.trim().isEmpty()){
typeField = "dc.type";
}
field.put(TYPE_BIND_FIELD_ATTRIBUTE, typeField);
}
}
}
String missing = null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1223,6 +1223,7 @@ private int getMetadataSchemaID(Metadatum dcv) throws SQLException
MetadataSchema schema = MetadataSchema.find(ourContext,dcv.schema);
if (schema == null)
{
log.warn(String.format("Failed to find schema \"%s\" using dc instead.", dcv.schema));
schemaID = MetadataSchema.DC_SCHEMA_ID;
}
else
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ private void updateRegistries()
MetadataImporter.loadRegistry(base + "sword-metadata.xml", true);
MetadataImporter.loadRegistry(base + "metashareSchema.xml", true);
MetadataImporter.loadRegistry(base + "local-types.xml", true);
MetadataImporter.loadRegistry(base + "edm.xml", true);

// Check if XML Workflow is enabled in workflow.cfg
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow"))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -161,10 +161,11 @@ public int doProcessing(Context context, HttpServletRequest request,
}

// Fetch the document type (dc.type)
String documentType = "";
if( (item.getMetadataByMetadataString("dc.type") != null) && (item.getMetadataByMetadataString("dc.type").length >0) )
Metadatum[] documentTypes = null;
if( (item.getMetadata(Item.ANY, "type", Item.ANY, Item.ANY) != null) && (item.getMetadata(Item.ANY, "type",
Item.ANY, Item.ANY).length > 0) )
{
documentType = item.getMetadataByMetadataString("dc.type")[0].value;
documentTypes = item.getMetadata(Item.ANY, "type", Item.ANY, Item.ANY);
}

// Step 1:
Expand Down Expand Up @@ -208,7 +209,7 @@ public int doProcessing(Context context, HttpServletRequest request,
for (int j = 0; j < inputs.length; j++)
{
// Omit fields not allowed for this document type
if(!inputs[j].isAllowedFor(documentType))
if(!inputs[j].isAllowedFor(documentTypes))
{
continue;
}
Expand Down Expand Up @@ -339,7 +340,7 @@ else if (buttonPressed.equals("submit_" + fieldName + "_delete"))
{
// Do not check the required attribute if it is not visible or not allowed for the document type
String scope = subInfo.isInWorkflow() ? DCInput.WORKFLOW_SCOPE : DCInput.SUBMISSION_SCOPE;
if ( !( inputs[i].isVisible(scope) && inputs[i].isAllowedFor(documentType) ) )
if ( !( inputs[i].isVisible(scope) && inputs[i].isAllowedFor(documentTypes) ) )
{
continue;
}
Expand Down
Loading