diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 03187e0cdf8..a56c15fe359 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -2069,3 +2069,9 @@ admin.api.migrateHDL.failure.must.be.hdl.dataset=Dataset was not registered as a
 admin.api.migrateHDL.success=Dataset migrate HDL registration complete. Dataset re-registered successfully.
 admin.api.migrateHDL.failure=Failed to migrate Dataset Handle id: {0}
 admin.api.migrateHDL.failureWithException=Failed to migrate Dataset Handle id: {0} Unexpected exception: {1}
+
+#Datasets.java
+datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset.
+datasets.api.updatePIDMetadata.auth.mustBeSuperUser=Forbidden. You must be a superuser.
+datasets.api.updatePIDMetadata.success.for.single.dataset=Dataset {0} PID Metadata updated successfully.
+datasets.api.updatePIDMetadata.success.for.update.all=All Dataset PID Metadata update completed successfully.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 0baf05b740d..f687c662ccd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -75,6 +75,7 @@
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.S3PackageImporter;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateDvObjectPIDMetadataCommand;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.EjbUtil;
@@ -88,6 +89,7 @@
 import java.io.StringReader;
 import java.sql.Timestamp;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Date;
 import java.util.HashSet;
 import java.util.LinkedList;
@@ -385,6 +387,41 @@ public Response updateDatasetTargetURLAll() {
             return ok("Update All Dataset target url completed");
         });
     }
+
+    @GET
+    @Path("{id}/modifyRegistrationMetadata")
+    public Response updateDatasetPIDMetadata(@PathParam("id") String id) {
+
+        try {
+            Dataset dataset = findDatasetOrDie(id);
+            if (!dataset.isReleased()) {
+                return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
+            }
+        } catch (WrappedResponse ex) {
+            return ex.getResponse();
+        }
+
+        return response(req -> {
+            execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req));
+            List<String> args = Arrays.asList(id);
+            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
+        });
+    }
+
+    @GET
+    @Path("/modifyRegistrationPIDMetadataAll")
+    public Response updateDatasetPIDMetadataAll() {
+        return response(req -> {
+            datasetService.findAll().forEach(ds -> {
+                try {
+                    execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
+                } catch (WrappedResponse ex) {
+                    Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
+                }
+            });
+            return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
+        });
+    }
 
     @PUT
     @Path("{id}/versions/{versionId}")
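Reviewer note: a minimal REST-assured sketch, in the same style as the integration tests further down, of how a client might exercise the two new endpoints. The token and PID values are placeholders and the base URI is assumed to point at a running Dataverse; only the paths, the GET verb, and the published-dataset requirement come from the code above.

    import static com.jayway.restassured.RestAssured.given;
    import com.jayway.restassured.response.Response;

    public class ModifyRegistrationMetadataSketch {
        public static void main(String[] args) {
            String apiToken = "replace-with-superuser-token";  // placeholder
            String persistentId = "doi:10.5072/FK2/EXAMPLE";   // placeholder PID

            // Single dataset: the endpoint returns 400 unless the dataset is published.
            Response single = given()
                    .header("X-Dataverse-key", apiToken)
                    .get("/api/datasets/:persistentId/modifyRegistrationMetadata/?persistentId=" + persistentId);
            single.prettyPrint();

            // Bulk variant: walks every dataset; drafts are skipped inside the command.
            Response all = given()
                    .header("X-Dataverse-key", apiToken)
                    .get("/api/datasets/modifyRegistrationPIDMetadataAll");
            all.prettyPrint();
        }
    }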
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java
new file mode 100644
index 00000000000..e2871148ff8
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java
@@ -0,0 +1,71 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.GlobalIdServiceBean;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import java.sql.Timestamp;
+import java.util.Collections;
+import java.util.Date;
+
+/**
+ * Updates the registered PID metadata (target URLs) for a dataset and its files.
+ * @author skraffmi
+ * No required permissions because we are enforcing superuser status in executeImpl.
+ */
+@RequiredPermissions({})
+public class UpdateDvObjectPIDMetadataCommand extends AbstractVoidCommand {
+
+    private final Dataset target;
+
+    public UpdateDvObjectPIDMetadataCommand(Dataset target, DataverseRequest aRequest) {
+        super(aRequest, target);
+        this.target = target;
+    }
+
+    @Override
+    protected void executeImpl(CommandContext ctxt) throws CommandException {
+
+        // Superuser gate: enforced here because @RequiredPermissions above is empty.
+        if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
+            throw new PermissionException(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.auth.mustBeSuperUser"),
+                    this, Collections.singleton(Permission.EditDataset), target);
+        }
+        if (!this.target.isReleased()) {
+            // This is for the bulk update version of the API.
+            // We don't want to modify drafts, but we do want the bulk run to keep going;
+            // the single-dataset API checks for drafts before calling the command.
+            return;
+        }
+        GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(target.getProtocol(), ctxt);
+        try {
+            String doiRetString = idServiceBean.modifyIdentifierTargetURL(target);
+            if (doiRetString != null && doiRetString.contains(target.getIdentifier())) {
+                target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
+                ctxt.em().merge(target);
+                ctxt.em().flush();
+                for (DataFile df : target.getFiles()) {
+                    doiRetString = idServiceBean.modifyIdentifierTargetURL(df);
+                    if (doiRetString != null && doiRetString.contains(df.getIdentifier())) {
+                        df.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
+                        ctxt.em().merge(df);
+                        ctxt.em().flush();
+                    }
+                }
+            } else {
+                // Do nothing - we'll know it failed because the global id create time won't have been updated.
+            }
+        } catch (Exception e) {
+            // Do nothing here either - failure is detectable the same way: globalIdCreateTime is not updated.
+        }
+    }
+
+}
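Reviewer note: because @RequiredPermissions({}) is empty, the engine's permission check passes for any user and the superuser gate only fires inside executeImpl(). A sketch of what a caller would observe, assuming the usual engine wiring (the engine, dataset, and dataverseRequest variables here are illustrative, not part of this diff):

    // Illustrative only: a non-superuser hits the in-command gate, not the annotation check.
    try {
        engine.submit(new UpdateDvObjectPIDMetadataCommand(dataset, dataverseRequest));
    } catch (PermissionException pe) {
        // Thrown for any non-superuser despite the empty @RequiredPermissions({}).
        logger.log(Level.WARNING, "PID metadata update rejected: {0}", pe.getMessage());
    }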
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index b604685f314..b53d0b6ea29 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -319,7 +319,7 @@ public void testCreatePublishDestroyDataset() {
         // FIXME: It would be awesome if we could just get a JSON object back instead. :(
         Map datasetContactFromExport = with(exportDatasetAsJson.body().asString()).param("datasetContact", "datasetContact")
                 .getJsonObject("datasetVersion.metadataBlocks.citation.fields.find { fields -> fields.typeName == datasetContact }");
-        System.out.println("datasetContactFromExport: " + datasetContactFromExport);
+        logger.info("datasetContactFromExport: " + datasetContactFromExport);
         assertEquals("datasetContact", datasetContactFromExport.get("typeName"));
         List valuesArray = (ArrayList) datasetContactFromExport.get("value");
 
@@ -449,7 +449,7 @@ public void testExport() {
         // FIXME: It would be awesome if we could just get a JSON object back instead. :(
         Map datasetContactFromExport = with(exportDatasetAsJson.body().asString()).param("datasetContact", "datasetContact")
                 .getJsonObject("datasetVersion.metadataBlocks.citation.fields.find { fields -> fields.typeName == datasetContact }");
-        System.out.println("datasetContactFromExport: " + datasetContactFromExport);
+        logger.info("datasetContactFromExport: " + datasetContactFromExport);
         assertEquals("datasetContact", datasetContactFromExport.get("typeName"));
         List valuesArray = (ArrayList) datasetContactFromExport.get("value");
 
@@ -636,7 +636,7 @@ public void testSequentialNumberAsIdentifierGenerationStyle() {
                 .statusCode(OK.getStatusCode());
 
         String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier");
-        System.out.println("identifier: " + identifier);
+        logger.info("identifier: " + identifier);
         String numericPart = identifier.replace("FK2/", ""); //remove shoulder from identifier
         assertTrue(StringUtils.isNumeric(numericPart));
 
@@ -680,7 +680,7 @@ public void testPrivateUrl() {
         Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
         createDatasetResponse.prettyPrint();
         Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
-        System.out.println("dataset id: " + datasetId);
+        logger.info("dataset id: " + datasetId);
 
         Response createContributorResponse = UtilIT.createRandomUser();
         String contributorUsername = UtilIT.getUsernameFromResponse(createContributorResponse);
@@ -942,7 +942,7 @@ public void testFileChecksum() {
         Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
         createDatasetResponse.prettyPrint();
         Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
-        System.out.println("dataset id: " + datasetId);
+        logger.info("dataset id: " + datasetId);
 
         Response getDatasetJsonNoFiles = UtilIT.nativeGet(datasetId, apiToken);
         getDatasetJsonNoFiles.prettyPrint();
@@ -1546,4 +1546,60 @@ public void testDatasetLocksApi() {
                 .statusCode(200);
     }
 
+    /**
+     * This test requires the root dataverse to be published to pass.
+     */
+    @Test
+    public void testUpdatePIDMetadataAPI() {
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        assertEquals(200, createUser.getStatusCode());
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        Response makeSuperUser = UtilIT.makeSuperUser(username);
+        assertEquals(200, makeSuperUser.getStatusCode());
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.prettyPrint();
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        Response getDatasetJsonBeforePublishing = UtilIT.nativeGet(datasetId, apiToken);
+        getDatasetJsonBeforePublishing.prettyPrint();
+        String protocol = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.protocol");
+        String authority = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.authority");
+        String identifier = JsonPath.from(getDatasetJsonBeforePublishing.getBody().asString()).getString("data.identifier");
+        String datasetPersistentId = protocol + ":" + authority + "/" + identifier;
+
+        Response publishDataverse = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
+        assertEquals(200, publishDataverse.getStatusCode());
+
+        Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken);
+        assertEquals(200, publishDataset.getStatusCode());
+
+        Response getDatasetJsonAfterPublishing = UtilIT.nativeGet(datasetId, apiToken);
+        getDatasetJsonAfterPublishing.prettyPrint();
+        getDatasetJsonAfterPublishing.then().assertThat()
+                .body("data.latestVersion.versionNumber", equalTo(1))
+                .body("data.latestVersion.versionMinorNumber", equalTo(0))
+                .body("data.latestVersion.metadataBlocks.citation.fields[2].value[0].datasetContactEmail.value", equalTo("finch@mailinator.com"))
+                .statusCode(OK.getStatusCode());
+
+        String pathToJsonFilePostPub = "doc/sphinx-guides/source/_static/api/dataset-add-metadata-after-pub.json";
+        Response addDataToPublishedVersion = UtilIT.addDatasetMetadataViaNative(datasetPersistentId, pathToJsonFilePostPub, apiToken);
+        addDataToPublishedVersion.prettyPrint();
+        addDataToPublishedVersion.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response updatePIDMetadata = UtilIT.updateDatasetPIDMetadata(datasetPersistentId, apiToken);
+        updatePIDMetadata.prettyPrint();
+        updatePIDMetadata.then().assertThat()
+                .statusCode(OK.getStatusCode());
+        logger.info("datasetPersistentId: " + datasetPersistentId);
+
+    }
+
 }
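Reviewer note: the new test creates a user, a dataverse, and a dataset but never tears them down. A sketch of the cleanup used by neighboring tests, assuming the existing UtilIT helpers destroyDataset, deleteDataverse, and deleteUser behave as they do elsewhere in this class:

    // Possible cleanup at the end of testUpdatePIDMetadataAPI() (sketch; helper availability assumed).
    Response destroyDataset = UtilIT.destroyDataset(datasetId, apiToken);
    destroyDataset.then().assertThat().statusCode(OK.getStatusCode());
    Response deleteDataverse = UtilIT.deleteDataverse(dataverseAlias, apiToken);
    deleteDataverse.then().assertThat().statusCode(OK.getStatusCode());
    Response deleteUser = UtilIT.deleteUser(username);
    deleteUser.then().assertThat().statusCode(OK.getStatusCode());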
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 2d08dcd8d63..29b3bd7755b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -102,7 +102,7 @@ public static Response createRandomUser(String usernamePrefix) {
                 .post("/api/builtin-users?key=" + BUILTIN_USER_KEY + "&password=" + password);
         return response;
     }
-    
+
     public static Response createRandomUser() {
 
         return createRandomUser("user");
@@ -422,6 +422,14 @@ static Response updateFieldLevelDatasetMetadataViaNative(String persistentId, St
                 .put("/api/datasets/:persistentId/editMetadata/?persistentId=" + persistentId + "&replace=true");
         return response;
     }
+
+    static Response updateDatasetPIDMetadata(String persistentId, String apiToken) {
+        Response response = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json")
+                .get("/api/datasets/:persistentId/modifyRegistrationMetadata/?persistentId=" + persistentId);
+        return response;
+    }
 
     static private String getDatasetXml(String title, String author, String description) {
         String xmlIn = "<?xml version=\"1.0\"?>\n"
@@ -848,6 +856,17 @@ static Response publishDatasetViaNativeApiDeprecated(String persistentId, String
                 .urlEncodingEnabled(false)
                 .get("/api/datasets/:persistentId/actions/:publish?type=" + majorOrMinor + "&persistentId=" + persistentId);
     }
+
+    static Response modifyDatasetPIDMetadataViaApi(String persistentId, String apiToken) {
+        /**
+         * @todo This should be a POST rather than a GET:
+         * https://github.com/IQSS/dataverse/issues/2431
+         */
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .urlEncodingEnabled(false)
+                .get("/api/datasets/:persistentId/modifyRegistrationMetadata?persistentId=" + persistentId);
+    }
 
     static Response publishDatasetViaNativeApi(Integer datasetId, String majorOrMinor, String apiToken) {
         /**
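Reviewer note: UtilIT gains a helper for the single-dataset endpoint but not for the bulk one. If a test for /modifyRegistrationPIDMetadataAll is added later, a companion helper might look like this (hypothetical, not part of this diff; the path and API_TOKEN_HTTP_HEADER constant come from the code above):

    static Response updateDatasetPIDMetadataAll(String apiToken) {
        return given()
                .header(API_TOKEN_HTTP_HEADER, apiToken)
                .get("/api/datasets/modifyRegistrationPIDMetadataAll");
    }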