diff --git a/doc/release-notes/10901deaccessioned file edit fix.md b/doc/release-notes/10901deaccessioned file edit fix.md new file mode 100644 index 00000000000..db12b1fc978 --- /dev/null +++ b/doc/release-notes/10901deaccessioned file edit fix.md @@ -0,0 +1 @@ +When a dataset was deaccessioned and it was the only previous version, an error would occur when trying to update the files. \ No newline at end of file diff --git a/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md b/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md new file mode 100644 index 00000000000..344859e2dbd --- /dev/null +++ b/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md @@ -0,0 +1,7 @@ +## Fix facets filter labels not translated in result block + +On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they aren't translated in the search results and remain in the default language, English. + +This version of Dataverse fixes this and includes internationalization in the facets visible in the search results section. + +For more information, see issue [#9408](https://github.com/IQSS/dataverse/issues/9408) and pull request [#10158](https://github.com/IQSS/dataverse/pull/10158) diff --git a/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md b/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md new file mode 100644 index 00000000000..8c79955891b --- /dev/null +++ b/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md @@ -0,0 +1,5 @@ +The following API has been added: + +/api/datasets/{datasetId}/links + +It lists the dataverses linked to a dataset. It can be executed only by administrators. 
\ No newline at end of file diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst index b4d365c4fd4..7c03a6f80c0 100644 --- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst +++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst @@ -129,15 +129,21 @@ Lists the link(s) created between a dataset and a Dataverse collection (see the curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/datasets/$linked-dataset-id/links -It returns a list in the following format: +It returns a list in the following format (new format as of v6.4): .. code-block:: json { "status": "OK", "data": { - "dataverses that link to dataset id 56782": [ - "crc990 (id 18802)" + "id": 5, + "identifier": "FK2/OTCWMM", + "linked-dataverses": [ + { + "id": 2, + "alias": "dataverse1", + "displayName": "Lab experiments 2023 June" + } ] } } diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index c5890fd9996..92cd4fc941b 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -12,7 +12,7 @@ v6.4 - **/api/datasets/$dataset-id/modifyRegistration**: Changed from GET to POST - **/api/datasets/modifyRegistrationPIDMetadataAll**: Changed from GET to POST - +- **/api/datasets/{identifier}/links**: The GET endpoint returns a list of Dataverses linked to the given Dataset. The format of the response has changed for v6.4, making it backward incompatible. 
v6.3 ---- diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 52cb7d6f2dc..40ed491a302 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -333,15 +333,20 @@ public DatasetVersion getLatestVersion() { return getVersions().get(0); } - public DatasetVersion getLatestVersionForCopy() { + public DatasetVersion getLatestVersionForCopy(boolean includeDeaccessioned) { for (DatasetVersion testDsv : getVersions()) { - if (testDsv.isReleased() || testDsv.isArchived()) { + if (testDsv.isReleased() || testDsv.isArchived() + || (testDsv.isDeaccessioned() && includeDeaccessioned)) { return testDsv; } } return getVersions().get(0); } + public DatasetVersion getLatestVersionForCopy(){ + return getLatestVersionForCopy(false); + } + public List getVersions() { return versions; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index c7657768d16..369a22fe8d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -2077,10 +2077,16 @@ public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") List dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId); JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder(); for (Dataverse dataverse : dvsThatLinkToThisDatasetId) { - dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")"); + JsonObjectBuilder datasetBuilder = Json.createObjectBuilder(); + datasetBuilder.add("id", dataverse.getId()); + datasetBuilder.add("alias", dataverse.getAlias()); + datasetBuilder.add("displayName", dataverse.getDisplayName()); + dataversesThatLinkToThisDatasetIdBuilder.add(datasetBuilder.build()); } JsonObjectBuilder response = 
Json.createObjectBuilder(); - response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder); + response.add("id", datasetId); + response.add("identifier", dataset.getIdentifier()); + response.add("linked-dataverses", dataversesThatLinkToThisDatasetIdBuilder); return ok(response); } catch (WrappedResponse wr) { return wr.getResponse(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index d786aab35a8..633d420c527 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -343,10 +343,10 @@ public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathP DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId); FileMetadata fileToDelete = dataFile.getLatestFileMetadata(); Dataset dataset = dataFile.getOwner(); - DatasetVersion v = dataset.getOrCreateEditVersion(); + dataset.getOrCreateEditVersion(); deletePhysicalFile = !dataFile.isReleased(); - UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete), v); + UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest, Arrays.asList(fileToDelete)); update_cmd.setValidateLenient(true); try { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index 7651e81af5b..396a7aca4df 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -126,7 +126,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { DatasetVersion latestVersion = theDataset.getLatestVersion(); if(persistedVersion==null) { Long id = 
latestVersion.getId(); - persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy().getId()); + persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy(true).getId()); } //Get or create (currently only when called with fmVarMet != null) a new edit version DatasetVersion editVersion = theDataset.getOrCreateEditVersion(fmVarMet); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java index 4f3f6e46e48..9328dd03ca2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java @@ -34,6 +34,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.MissingResourceException; import java.util.Optional; import java.util.Set; import java.util.logging.Logger; @@ -1231,40 +1232,33 @@ public String getTypeFromFilterQuery(String filterQuery) { } public List getFriendlyNamesFromFilterQuery(String filterQuery) { - - - if ((filterQuery == null)|| - (datasetfieldFriendlyNamesBySolrField == null)|| - (staticSolrFieldFriendlyNamesBySolrField==null)){ + + if ((filterQuery == null) || + (datasetfieldFriendlyNamesBySolrField == null) || + (staticSolrFieldFriendlyNamesBySolrField == null)) { return null; } - - if(!filterQuery.contains(":")) { + + if (!filterQuery.contains(":")) { return null; } - + int index = filterQuery.indexOf(":"); String key = filterQuery.substring(0,index); String value = filterQuery.substring(index+1); - List friendlyNames = new ArrayList<>(); + // friendlyNames gets 2 entries: key and value + List friendlyNames = new ArrayList<>(2); + // Get dataset field friendly name from default resource bundle file String datasetfieldFriendyName = datasetfieldFriendlyNamesBySolrField.get(key); if (datasetfieldFriendyName != null) { 
friendlyNames.add(datasetfieldFriendyName); } else { + // Get non-dataset field friendly name from "staticSearchFields" resource bundle file String nonDatasetSolrField = staticSolrFieldFriendlyNamesBySolrField.get(key); if (nonDatasetSolrField != null) { friendlyNames.add(nonDatasetSolrField); - } else if (key.equals(SearchFields.PUBLICATION_STATUS)) { - /** - * @todo Refactor this quick fix for - * https://github.com/IQSS/dataverse/issues/618 . We really need - * to get rid of all the reflection that's happening with - * solrQueryResponse.getStaticSolrFieldFriendlyNamesBySolrField() - * and - */ - friendlyNames.add("Publication Status"); } else { // meh. better than nuthin' friendlyNames.add(key); @@ -1276,9 +1270,13 @@ public List getFriendlyNamesFromFilterQuery(String filterQuery) { String valueWithoutQuotes = noTrailingQuote; if (key.equals(SearchFields.METADATA_TYPES) && getDataverse() != null && getDataverse().getMetadataBlockFacets() != null) { - Optional friendlyName = getDataverse().getMetadataBlockFacets().stream().filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes)).findFirst().map(block -> block.getMetadataBlock().getLocaleDisplayFacet()); + Optional friendlyName = getDataverse().getMetadataBlockFacets() + .stream() + .filter(block -> block.getMetadataBlock().getName().equals(valueWithoutQuotes)) + .findFirst() + .map(block -> block.getMetadataBlock().getLocaleDisplayFacet()); logger.fine(String.format("action=getFriendlyNamesFromFilterQuery key=%s value=%s friendlyName=%s", key, value, friendlyName)); - if(friendlyName.isPresent()) { + if (friendlyName.isPresent()) { friendlyNames.add(friendlyName.get()); return friendlyNames; } @@ -1290,7 +1288,15 @@ public List getFriendlyNamesFromFilterQuery(String filterQuery) { } } - friendlyNames.add(valueWithoutQuotes); + // Get value friendly name from default resource bundle file + String valueFriendlyName; + try { + valueFriendlyName = 
BundleUtil.getStringFromPropertyFile(noTrailingQuote, "Bundle"); + } catch (MissingResourceException e) { + valueFriendlyName = noTrailingQuote; + } + + friendlyNames.add(valueFriendlyName); return friendlyNames; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index f52aa4fe9bd..8e38a96fc97 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -667,6 +667,60 @@ public void testCreatePublishDestroyDataset() { deleteDatasetResponse.prettyPrint(); assertEquals(200, deleteDatasetResponse.getStatusCode()); + // Start of test of deleting a file from a deaccessioned version. + + // Create Dataset for deaccession test. + Response deaccessionTestDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + deaccessionTestDataset.prettyPrint(); + deaccessionTestDataset.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer deaccessionTestDatasetId = UtilIT.getDatasetIdFromResponse(deaccessionTestDataset); + + // File upload for deaccession test. + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(deaccessionTestDatasetId.toString(), pathToFile, apiToken); + uploadResponse.prettyPrint(); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + Integer deaccessionTestFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); + + // Publish Dataset for deaccession test. + Response deaccessionTestPublishResponse = UtilIT.publishDatasetViaNativeApi(deaccessionTestDatasetId, "major", apiToken); + deaccessionTestPublishResponse.prettyPrint(); + + // Deaccession Dataset for deaccession test. 
+ Response deaccessionTestDatasetResponse = UtilIT.deaccessionDataset(deaccessionTestDatasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionTestDatasetResponse.prettyPrint(); + deaccessionTestDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Version check for deaccession test - Deaccessioned. + Response deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken); + deaccessionTestVersions.prettyPrint(); + deaccessionTestVersions.then().assertThat() + .body("data[0].latestVersionPublishingState", equalTo("DEACCESSIONED")) + .statusCode(OK.getStatusCode()); + + // File deletion / Draft creation due diligence check for deaccession test. + Response deaccessionTestDeleteFile = UtilIT.deleteFileInDataset(deaccessionTestFileId, apiToken); + deaccessionTestDeleteFile.prettyPrint(); + deaccessionTestDeleteFile + .then().assertThat() + .statusCode(OK.getStatusCode()); + + // Version check for deaccession test - Draft. + deaccessionTestVersions = UtilIT.getDatasetVersions(deaccessionTestDatasetId.toString(), apiToken); + deaccessionTestVersions.prettyPrint(); + deaccessionTestVersions.then().assertThat() + .body("data[0].latestVersionPublishingState", equalTo("DRAFT")) + .statusCode(OK.getStatusCode()); + + // Deleting Dataset for deaccession test. + Response deaccessionTestDelete = UtilIT.destroyDataset(deaccessionTestDatasetId, apiToken); + deaccessionTestDelete.prettyPrint(); + deaccessionTestDelete.then() + .assertThat() + .statusCode(OK.getStatusCode()); + + // End of deaccession test. 
+ Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); deleteDataverseResponse.prettyPrint(); assertEquals(200, deleteDataverseResponse.getStatusCode()); @@ -2963,6 +3017,34 @@ public void testLinkingDatasets() { linkDataset.then().assertThat() .statusCode(OK.getStatusCode()); + // Link another to test the list of linked datasets + Response createDataverse3 = UtilIT.createRandomDataverse(apiToken); + createDataverse3.prettyPrint(); + createDataverse3.then().assertThat() + .statusCode(CREATED.getStatusCode()); + String dataverse3Alias = UtilIT.getAliasFromResponse(createDataverse3); + Integer dataverse3Id = UtilIT.getDatasetIdFromResponse(createDataverse3); + linkDataset = UtilIT.linkDataset(datasetPid, dataverse3Alias, superuserApiToken); + linkDataset.prettyPrint(); + linkDataset.then().assertThat() + .statusCode(OK.getStatusCode()); + // get the list in Json format + Response linkDatasetsResponse = UtilIT.getDatasetLinks(datasetPid, superuserApiToken); + linkDatasetsResponse.prettyPrint(); + linkDatasetsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + JsonObject linkDatasets = Json.createReader(new StringReader(linkDatasetsResponse.asString())).readObject(); + JsonArray lst = linkDatasets.getJsonObject("data").getJsonArray("linked-dataverses"); + List ids = List.of(dataverse2Id, dataverse3Id); + List uniqueids = new ArrayList<>(); + assertEquals(ids.size(), lst.size()); + for (int i = 0; i < lst.size(); i++) { + int id = lst.getJsonObject(i).getInt("id"); + assertTrue(ids.contains(id)); + assertFalse(uniqueids.contains(id)); + uniqueids.add(id); + } + //Experimental code for trying to trick test into thinking the dataset has been harvested /* createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverse1Alias, apiToken); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java index f7135ce7f3b..8951b0bd42e 100644 --- 
a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java @@ -278,8 +278,8 @@ public void testMoveLinkedDataset() { .body("message", equalTo("Use the query parameter forceMove=true to complete the move. This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. ")); JsonObject linksBeforeData = Json.createReader(new StringReader(getLinksBefore.asString())).readObject(); - assertEquals("OK", linksBeforeData.getString("status")); - assertEquals(dataverse2Alias + " (id " + dataverse2Id + ")", linksBeforeData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).getString(0)); + assertEquals(datasetId, linksBeforeData.getJsonObject("data").getInt("id")); + assertEquals(dataverse2Id, linksBeforeData.getJsonObject("data").getJsonArray("linked-dataverses").get(0).asJsonObject().getInt("id")); boolean forceMove = true; Response forceMoveLinkedDataset = UtilIT.moveDataset(datasetId.toString(), dataverse2Alias, forceMove, superuserApiToken); @@ -308,8 +308,7 @@ public void testMoveLinkedDataset() { JsonObject linksAfterData = Json.createReader(new StringReader(getLinksAfter.asString())).readObject(); assertEquals("OK", linksAfterData.getString("status")); - assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).size()); - + assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("linked-dataverses").size()); } @Test