Skip to content

Commit

Permalink
#5060 Add api for updating PID metadata
Browse files Browse the repository at this point in the history
  • Loading branch information
sekmiller committed Oct 12, 2018
1 parent 7d0fef1 commit 7f4e3f8
Show file tree
Hide file tree
Showing 5 changed files with 195 additions and 6 deletions.
6 changes: 6 additions & 0 deletions src/main/java/Bundle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -2069,3 +2069,9 @@ admin.api.migrateHDL.failure.must.be.hdl.dataset=Dataset was not registered as a
admin.api.migrateHDL.success=Dataset migrate HDL registration complete. Dataset re-registered successfully.
admin.api.migrateHDL.failure=Failed to migrate Dataset Handle id: {0}
admin.api.migrateHDL.failureWithException=Failed to migrate Dataset Handle id: {0} Unexpected exception: {1}

#Datasets.java
datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset.
datasets.api.updatePIDMetadata.auth.mustBeSuperUser=Forbidden. You must be a superuser.
datasets.api.updatePIDMetadata.success.for.single.dataset=Dataset {0} PID Metadata updated successfully.
datasets.api.updatePIDMetadata.success.for.update.all=All Dataset PID Metadata update completed successfully.
37 changes: 37 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.S3PackageImporter;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDvObjectPIDMetadataCommand;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.EjbUtil;
Expand All @@ -88,6 +89,7 @@
import java.io.StringReader;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
Expand Down Expand Up @@ -385,6 +387,41 @@ public Response updateDatasetTargetURLAll() {
return ok("Update All Dataset target url completed");
});
}

/**
 * Re-registers the PID (DOI/Handle) metadata of a single published dataset
 * with the configured global-id provider.
 *
 * @param id the dataset database id or persistent id (resolved by findDatasetOrDie)
 * @return 200 with a success message; 400 if the dataset is not yet released;
 *         otherwise the error response produced by dataset lookup / command execution
 */
@GET
@Path("{id}/modifyRegistrationMetadata")
public Response updateDatasetPIDMetadata(@PathParam("id") String id) {

    try {
        Dataset dataset = findDatasetOrDie(id);
        if (!dataset.isReleased()) {
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
        }
    } catch (WrappedResponse ex) {
        // Bug fix: previously this was only logged and execution fell through,
        // swallowing lookup failures (e.g. 404). Return the wrapped error instead.
        return ex.getResponse();
    }

    return response(req -> {
        execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req));
        List<String> args = Arrays.asList(id);
        return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
    });
}

/**
 * Re-registers the PID metadata of every dataset in the installation.
 * Draft datasets are skipped inside the command itself; per-dataset failures
 * are logged and the bulk run continues (best-effort semantics).
 *
 * @return 200 with a completion message
 */
@GET
@Path("/modifyRegistrationPIDMetadataAll")
public Response updateDatasetPIDMetadataAll() {
    return response(req -> {
        datasetService.findAll().forEach(ds -> {
            try {
                // Fix: pass the already-loaded Dataset directly; re-fetching it
                // via findDatasetOrDie(ds.getId().toString()) was a redundant
                // per-dataset database round trip.
                execCommand(new UpdateDvObjectPIDMetadataCommand(ds, req));
            } catch (WrappedResponse ex) {
                // Best-effort bulk operation: log and move on to the next dataset.
                Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
            }
        });
        return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
    });
}

@PUT
@Path("{id}/versions/{versionId}")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
package edu.harvard.iq.dataverse.engine.command.impl;

import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.GlobalIdServiceBean;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.sql.Timestamp;
import java.util.Collections;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Updates the registered PID (DOI/Handle) target-URL metadata of a published
 * dataset and of each of its files that has a registered PID. Success is
 * recorded by refreshing the global-id create time on each updated object.
 *
 * No required permissions because superuser status is enforced in executeImpl.
 *
 * @author skraffmi
 */
@RequiredPermissions({})
public class UpdateDvObjectPIDMetadataCommand extends AbstractVoidCommand {

    private static final Logger logger = Logger.getLogger(UpdateDvObjectPIDMetadataCommand.class.getName());

    private final Dataset target;

    public UpdateDvObjectPIDMetadataCommand(Dataset target, DataverseRequest aRequest) {
        super(aRequest, target);
        this.target = target;
    }

    @Override
    protected void executeImpl(CommandContext ctxt) throws CommandException {

        if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
            throw new PermissionException(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.auth.mustBeSuperUser"),
                    this, Collections.singleton(Permission.EditDataset), target);
        }
        if (!this.target.isReleased()) {
            // This is for the bulk update version of the api.
            // We don't want to modify drafts, but we want it to keep going;
            // the single dataset update api checks for drafts before calling the command.
            return;
        }
        GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(target.getProtocol(), ctxt);
        try {
            String doiRetString = idServiceBean.modifyIdentifierTargetURL(target);
            if (doiRetString != null && doiRetString.contains(target.getIdentifier())) {
                // Record the successful (re)registration on the dataset itself.
                target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
                ctxt.em().merge(target);
                ctxt.em().flush();
                for (DataFile df : target.getFiles()) {
                    if (df.getIdentifier() == null) {
                        // Bug fix: files without a registered PID previously caused an
                        // NPE in contains() below, which the broad catch silently hid.
                        continue;
                    }
                    String fileRetString = idServiceBean.modifyIdentifierTargetURL(df);
                    if (fileRetString != null && fileRetString.contains(df.getIdentifier())) {
                        df.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
                        ctxt.em().merge(df);
                        ctxt.em().flush();
                    }
                }
            }
            // else: intentionally do nothing - we'll know it failed because the
            // global id create time won't have been updated.
        } catch (Exception e) {
            // Bug fix: this catch previously swallowed the exception while its
            // comment claimed "the problem has been logged" - nothing logged it.
            // Keep best-effort semantics (don't fail the command) but record why.
            logger.log(Level.WARNING,
                    "Failed to update PID metadata for dataset " + target.getId(), e);
        }
    }

}
66 changes: 61 additions & 5 deletions src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
Original file line number Diff line number Diff line change
Expand Up @@ -319,7 +319,7 @@ public void testCreatePublishDestroyDataset() {
// FIXME: It would be awesome if we could just get a JSON object back instead. :(
Map<String, Object> datasetContactFromExport = with(exportDatasetAsJson.body().asString()).param("datasetContact", "datasetContact")
.getJsonObject("datasetVersion.metadataBlocks.citation.fields.find { fields -> fields.typeName == datasetContact }");
System.out.println("datasetContactFromExport: " + datasetContactFromExport);
logger.info("datasetContactFromExport: " + datasetContactFromExport);

assertEquals("datasetContact", datasetContactFromExport.get("typeName"));
List valuesArray = (ArrayList) datasetContactFromExport.get("value");
Expand Down Expand Up @@ -449,7 +449,7 @@ public void testExport() {
// FIXME: It would be awesome if we could just get a JSON object back instead. :(
Map<String, Object> datasetContactFromExport = with(exportDatasetAsJson.body().asString()).param("datasetContact", "datasetContact")
.getJsonObject("datasetVersion.metadataBlocks.citation.fields.find { fields -> fields.typeName == datasetContact }");
System.out.println("datasetContactFromExport: " + datasetContactFromExport);
logger.info("datasetContactFromExport: " + datasetContactFromExport);

assertEquals("datasetContact", datasetContactFromExport.get("typeName"));
List valuesArray = (ArrayList) datasetContactFromExport.get("value");
Expand Down Expand Up @@ -636,7 +636,7 @@ public void testSequentialNumberAsIdentifierGenerationStyle() {
.statusCode(OK.getStatusCode());

String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier");
System.out.println("identifier: " + identifier);
logger.info("identifier: " + identifier);
String numericPart = identifier.replace("FK2/", ""); //remove shoulder from identifier
assertTrue(StringUtils.isNumeric(numericPart));

Expand Down Expand Up @@ -680,7 +680,7 @@ public void testPrivateUrl() {
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
createDatasetResponse.prettyPrint();
Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
System.out.println("dataset id: " + datasetId);
logger.info("dataset id: " + datasetId);

Response createContributorResponse = UtilIT.createRandomUser();
String contributorUsername = UtilIT.getUsernameFromResponse(createContributorResponse);
Expand Down Expand Up @@ -942,7 +942,7 @@ public void testFileChecksum() {
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
createDatasetResponse.prettyPrint();
Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
System.out.println("dataset id: " + datasetId);
logger.info("dataset id: " + datasetId);

Response getDatasetJsonNoFiles = UtilIT.nativeGet(datasetId, apiToken);
getDatasetJsonNoFiles.prettyPrint();
Expand Down Expand Up @@ -1546,4 +1546,60 @@ public void testDatasetLocksApi() {
.statusCode(200);
}

/**
 * End-to-end test of the modifyRegistrationMetadata API: create and publish a
 * dataset, add post-publication metadata, then update its PID metadata.
 * This test requires the root dataverse to be published to pass.
 */
@Test
public void testUpdatePIDMetadataAPI() {

    Response createUser = UtilIT.createRandomUser();
    createUser.prettyPrint();
    // Consistency fix: use OK.getStatusCode() throughout instead of mixing in
    // the magic literal 200, matching the assertions later in this method.
    assertEquals(OK.getStatusCode(), createUser.getStatusCode());
    String username = UtilIT.getUsernameFromResponse(createUser);
    String apiToken = UtilIT.getApiTokenFromResponse(createUser);
    // The PID-metadata endpoints are superuser-only.
    Response makeSuperUser = UtilIT.makeSuperUser(username);
    assertEquals(OK.getStatusCode(), makeSuperUser.getStatusCode());

    Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
    createDataverseResponse.prettyPrint();
    String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);

    Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
    createDatasetResponse.prettyPrint();
    Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");

    // Assemble the persistent id (protocol:authority/identifier) before publishing.
    Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken);
    getDatasetJsonBeforePublishing.prettyPrint();
    String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol");
    String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority");
    String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier");
    String datasetPersistentId = protocol + ":" + authority + "/" + identifier;

    Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
    assertEquals(OK.getStatusCode(), publishDataverse.getStatusCode());

    Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken);
    assertEquals(OK.getStatusCode(), publishDataset.getStatusCode());

    Response getDatasetJsonAfterPublishing = UtilIT.nativeGet(datasetId, apiToken);
    getDatasetJsonAfterPublishing.prettyPrint();
    getDatasetJsonAfterPublishing.then().assertThat()
            .body("data.latestVersion.versionNumber", equalTo(1))
            .body("data.latestVersion.versionMinorNumber", equalTo(0))
            .body("data.latestVersion.metadataBlocks.citation.fields[2].value[0].datasetContactEmail.value", equalTo("finch@mailinator.com"))
            .statusCode(OK.getStatusCode());

    // The endpoint only operates on published datasets, so the metadata change
    // is applied after publication.
    String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json";
    Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken);
    addDataToPublishedVersion.prettyPrint();
    addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode());

    Response updatePIDMetadata = UtilIT.updateDatasetPIDMetadata(datasetPersistentId, apiToken);
    updatePIDMetadata.prettyPrint();
    updatePIDMetadata.then().assertThat()
            .statusCode(OK.getStatusCode());
    logger.info("datasetPersistentId: " + datasetPersistentId);

}

}
21 changes: 20 additions & 1 deletion src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ public static Response createRandomUser(String usernamePrefix) {
.post("/api/builtin-users?key=" + BUILTIN_USER_KEY + "&password=" + password);
return response;
}

public static Response createRandomUser() {

return createRandomUser("user");
Expand Down Expand Up @@ -422,6 +422,14 @@ static Response updateFieldLevelDatasetMetadataViaNative(String persistentId, St
.put("/api/datasets/:persistentId/editMetadata/?persistentId=" + persistentId + "&replace=true");
return response;
}

/**
 * Calls the single-dataset PID metadata update endpoint
 * (GET /api/datasets/:persistentId/modifyRegistrationMetadata) using the
 * :persistentId lookup form.
 */
static Response updateDatasetPIDMetadata(String persistentId, String apiToken) {
    return given()
            .header(API_TOKEN_HTTP_HEADER, apiToken)
            .contentType("application/json")
            .get("/api/datasets/:persistentId/modifyRegistrationMetadata/?persistentId=" + persistentId);
}

static private String getDatasetXml(String title, String author, String description) {
String xmlIn = "<?xml version=\"1.0\"?>\n"
Expand Down Expand Up @@ -848,6 +856,17 @@ static Response publishDatasetViaNativeApiDeprecated(String persistentId, String
.urlEncodingEnabled(false)
.get("/api/datasets/:persistentId/actions/:publish?type=" + majorOrMinor + "&persistentId=" + persistentId);
}

/**
 * Calls the single-dataset PID metadata update endpoint. URL encoding is
 * disabled so the slashes inside the persistent id query value pass through
 * untouched, matching the other persistentId-based helpers in this class.
 */
static Response modifyDatasetPIDMetadataViaApi(String persistentId, String apiToken) {
    /**
     * @todo This should be a POST rather than a GET:
     * https://github.com/IQSS/dataverse/issues/2431
     */
    // Bug fix: the URL was "/api/datasets/:persistentId/&persistentId=...",
    // which omits the modifyRegistrationMetadata path segment entirely and
    // starts the query string with '&' instead of '?', so the request could
    // never reach the endpoint declared in Datasets.java.
    return given()
            .header(API_TOKEN_HTTP_HEADER, apiToken)
            .urlEncodingEnabled(false)
            .get("/api/datasets/:persistentId/modifyRegistrationMetadata?persistentId=" + persistentId);
}

static Response publishDatasetViaNativeApi(Integer datasetId, String majorOrMinor, String apiToken) {
/**
Expand Down

0 comments on commit 7f4e3f8

Please sign in to comment.